-rw-r--r--  META-INF/MANIFEST.MF | 2
-rw-r--r--  README | 43
-rw-r--r--  build.xml | 321
-rw-r--r--  docs/examples/parsing/lambda/TestParser.scala | 2
-rw-r--r--  docs/examples/pilib/elasticBuffer.scala | 2
-rw-r--r--  docs/examples/pilib/mobilePhoneProtocol.scala | 2
-rw-r--r--  docs/examples/plugintemplate/src/plugintemplate/standalone/Main.scala | 4
l---------  lib/ScalaCheck.jar | 1
-rw-r--r--  lib/fjbg.jar.desired.sha1 | 2
-rw-r--r--  lib/msil.jar.desired.sha1 | 2
-rw-r--r--  lib/scala-compiler.jar.desired.sha1 | 2
-rw-r--r--  lib/scala-library-src.jar.desired.sha1 | 2
-rw-r--r--  lib/scala-library.jar.desired.sha1 | 2
-rw-r--r--  lib/scalacheck-1.6dev.jar.desired.sha1 | 1
-rw-r--r--  lib/scalacheck.jar.desired.sha1 | 1
-rw-r--r--  scala-lang.ipr | 1527
-rw-r--r--  src/actors/scala/actors/AbstractActor.scala | 5
-rw-r--r--  src/actors/scala/actors/AbstractReactor.scala | 1
-rw-r--r--  src/actors/scala/actors/Actor.scala | 256
-rw-r--r--  src/actors/scala/actors/ActorCanReply.scala (renamed from src/actors/scala/actors/ReplyableActor.scala) | 39
-rw-r--r--  src/actors/scala/actors/ActorProxy.scala | 1
-rw-r--r--  src/actors/scala/actors/ActorTask.scala | 11
-rw-r--r--  src/actors/scala/actors/CanReply.scala (renamed from src/actors/scala/actors/Replyable.scala) | 34
-rw-r--r--  src/actors/scala/actors/Channel.scala | 2
-rw-r--r--  src/actors/scala/actors/Combinators.scala | 39
-rw-r--r--  src/actors/scala/actors/Future.scala | 40
-rw-r--r--  src/actors/scala/actors/IScheduler.scala | 6
-rw-r--r--  src/actors/scala/actors/InputChannel.scala | 1
-rw-r--r--  src/actors/scala/actors/MessageQueue.scala | 48
-rw-r--r--  src/actors/scala/actors/OutputChannel.scala | 3
-rw-r--r--  src/actors/scala/actors/ReactChannel.scala | 2
-rw-r--r--  src/actors/scala/actors/Reaction.scala | 14
-rw-r--r--  src/actors/scala/actors/Reactor.scala | 202
-rw-r--r--  src/actors/scala/actors/ReactorCanReply.scala (renamed from src/actors/scala/actors/ReplyableReactor.scala) | 17
-rw-r--r--  src/actors/scala/actors/ReactorTask.scala | 46
-rw-r--r--  src/actors/scala/actors/ReplyReactor.scala | 105
-rw-r--r--  src/actors/scala/actors/ReplyReactorTask.scala | 36
-rw-r--r--  src/actors/scala/actors/Scheduler.scala | 2
-rw-r--r--  src/actors/scala/actors/SchedulerAdapter.scala | 6
-rw-r--r--  src/actors/scala/actors/UncaughtException.scala | 31
-rw-r--r--  src/actors/scala/actors/package.scala | 16
-rw-r--r--  src/actors/scala/actors/remote/NetKernel.scala | 2
-rw-r--r--  src/actors/scala/actors/scheduler/ActorGC.scala (renamed from src/actors/scala/actors/ActorGC.scala) | 18
-rw-r--r--  src/actors/scala/actors/scheduler/DaemonScheduler.scala | 2
-rw-r--r--  src/actors/scala/actors/scheduler/DefaultThreadPoolScheduler.scala | 49
-rw-r--r--  src/actors/scala/actors/scheduler/DelegatingScheduler.scala | 6
-rw-r--r--  src/actors/scala/actors/scheduler/ExecutorScheduler.scala | 45
-rw-r--r--  src/actors/scala/actors/scheduler/ForkJoinScheduler.scala | 24
-rw-r--r--  src/actors/scala/actors/scheduler/QuitControl.scala | 19
-rw-r--r--  src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala | 8
-rw-r--r--  src/actors/scala/actors/scheduler/SchedulerService.scala | 80
-rw-r--r--  src/actors/scala/actors/scheduler/SimpleExecutorScheduler.scala | 43
-rw-r--r--  src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala | 6
-rw-r--r--  src/actors/scala/actors/scheduler/TerminationMonitor.scala | 11
-rw-r--r--  src/actors/scala/actors/scheduler/TerminationService.scala | 22
-rw-r--r--  src/actors/scala/actors/scheduler/ThreadPoolConfig.scala | 46
-rw-r--r--  src/actors/scala/actors/scheduler/ThreadPoolScheduler.scala | 81
-rw-r--r--  src/attic/README | 2
-rw-r--r--  src/attic/scala/tools/nsc/models/Models.scala (renamed from src/compiler/scala/tools/nsc/models/Models.scala) | 4
-rw-r--r--  src/attic/scala/tools/nsc/models/SemanticTokens.scala (renamed from src/compiler/scala/tools/nsc/models/SemanticTokens.scala) | 14
-rw-r--r--  src/attic/scala/tools/nsc/models/Signatures.scala (renamed from src/compiler/scala/tools/nsc/models/Signatures.scala) | 2
-rw-r--r--  src/attic/scala/tools/nsc/symtab/SymbolWalker.scala (renamed from src/compiler/scala/tools/nsc/symtab/SymbolWalker.scala) | 3
-rw-r--r--  src/build/maven/continuations-plugin-pom.xml | 51
-rw-r--r--  src/build/maven/maven-deploy.xml | 51
-rw-r--r--  src/build/pack.xml | 20
-rw-r--r--  src/compiler/scala/tools/ant/Pack200Task.scala | 4
-rw-r--r--  src/compiler/scala/tools/ant/ScalaBazaar.scala | 6
-rw-r--r--  src/compiler/scala/tools/ant/ScalaTool.scala | 6
-rw-r--r--  src/compiler/scala/tools/ant/Scalac.scala | 16
-rw-r--r--  src/compiler/scala/tools/ant/ScalacShared.scala | 25
-rw-r--r--  src/compiler/scala/tools/ant/Scaladoc.scala | 32
-rw-r--r--  src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala | 4
-rw-r--r--  src/compiler/scala/tools/ant/sabbus/ScalacFork.scala | 128
-rw-r--r--  src/compiler/scala/tools/ant/sabbus/TaskArgs.scala | 40
-rw-r--r--  src/compiler/scala/tools/ant/sabbus/Use.scala | 2
-rw-r--r--  src/compiler/scala/tools/ant/templates/tool-unix.tmpl | 15
-rw-r--r--  src/compiler/scala/tools/nsc/CompileClient.scala | 72
-rw-r--r--  src/compiler/scala/tools/nsc/CompileServer.scala | 121
-rw-r--r--  src/compiler/scala/tools/nsc/CompileSocket.scala | 116
-rw-r--r--  src/compiler/scala/tools/nsc/CompilerCommand.scala | 111
-rw-r--r--  src/compiler/scala/tools/nsc/EvalLoop.scala | 19
-rw-r--r--  src/compiler/scala/tools/nsc/FatalError.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/GenericRunnerCommand.scala | 95
-rw-r--r--  src/compiler/scala/tools/nsc/GenericRunnerSettings.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 259
-rw-r--r--  src/compiler/scala/tools/nsc/Interpreter.scala | 826
-rw-r--r--  src/compiler/scala/tools/nsc/InterpreterCommand.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/InterpreterLoop.scala | 355
-rw-r--r--  src/compiler/scala/tools/nsc/InterpreterSettings.scala | 35
-rw-r--r--  src/compiler/scala/tools/nsc/Main.scala | 23
-rw-r--r--  src/compiler/scala/tools/nsc/MainGenericRunner.scala | 109
-rw-r--r--  src/compiler/scala/tools/nsc/MainTokenMetric.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ObjectRunner.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/PhaseAssembly.scala | 21
-rw-r--r--  src/compiler/scala/tools/nsc/Properties.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/ScalaDoc.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/ScriptRunner.scala | 60
-rw-r--r--  src/compiler/scala/tools/nsc/Settings.scala | 943
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/DocComments.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/ast/NodePrinters.scala | 70
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeDSL.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeGen.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeInfo.scala | 19
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreePrinters.scala | 338
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Trees.scala | 1048
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala | 186
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 51
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Tokens.scala | 69
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala | 88
-rw-r--r--  src/compiler/scala/tools/nsc/backend/JavaPlatform.scala | 41
-rw-r--r--  src/compiler/scala/tools/nsc/backend/MSILPlatform.scala | 36
-rw-r--r--  src/compiler/scala/tools/nsc/backend/Platform.scala | 31
-rw-r--r--  src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala (renamed from src/compiler/scala/tools/nsc/backend/icode/CheckerError.scala) | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Checkers.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 193
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/ICodes.scala | 19
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Members.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/CompleteLattice.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala | 37
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/LubException.scala (renamed from src/compiler/scala/tools/nsc/backend/icode/analysis/LubError.scala) | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala | 145
-rw-r--r--  src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala | 271
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala | 23
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 25
-rw-r--r--  src/compiler/scala/tools/nsc/dependencies/Changes.scala | 71
-rw-r--r--  src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala | 45
-rw-r--r--  src/compiler/scala/tools/nsc/doc/DocFactory.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/doc/DocProvider.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/doc/Settings.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/doc/SourcelessComments.scala | 339
-rw-r--r--  src/compiler/scala/tools/nsc/doc/Universe.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala | 50
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala | 39
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/Index.scala | 44
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/Source.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/Template.scala | 145
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png | bin 0 -> 3519 bytes
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd | bin 0 -> 31923 bytes
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png | bin 0 -> 2977 bytes
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd | bin 0 -> 28574 bytes
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css | 85
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js | 352
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js | 165
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png | bin 0 -> 3186 bytes
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd | bin 0 -> 28904 bytes
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js | 71
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css | 86
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js | 105
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js | 14
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/Entity.scala | 58
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala | 325
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/comment/Body.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala | 40
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala | 455
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/BuildManager.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/CompilerControl.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/Global.scala | 94
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/REPL.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/RangePositions.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala | 82
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/ByteCode.scala | 43
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/Completion.scala | 399
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala | 113
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/Delimited.scala | 36
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/FileCompletion.scala | 54
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/History.scala | 36
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/IdentCompletion.scala | 25
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala | 18
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/JLineReader.scala | 31
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/LiteralCompletion.scala | 50
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/PackageCompletion.scala | 187
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/Parsed.scala | 63
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/ProductCompletion.scala | 42
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/ReflectionCompletion.scala | 126
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/XMLCompletion.scala | 43
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/package.scala | 25
-rw-r--r--  src/compiler/scala/tools/nsc/io/AbstractFile.scala | 24
-rw-r--r--  src/compiler/scala/tools/nsc/io/Directory.scala | 32
-rw-r--r--  src/compiler/scala/tools/nsc/io/File.scala | 57
-rw-r--r--  src/compiler/scala/tools/nsc/io/Path.scala | 98
-rw-r--r--  src/compiler/scala/tools/nsc/io/PlainFile.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/io/Process.scala | 32
-rw-r--r--  src/compiler/scala/tools/nsc/io/Socket.scala | 46
-rw-r--r--  src/compiler/scala/tools/nsc/io/SourceReader.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/io/Streamable.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/io/VirtualDirectory.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/io/VirtualFile.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/io/ZipArchive.scala | 27
-rw-r--r--  src/compiler/scala/tools/nsc/javac/JavaParsers.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/javac/JavaScanners.scala | 32
-rw-r--r--  src/compiler/scala/tools/nsc/javac/JavaTokens.scala | 31
-rw-r--r--  src/compiler/scala/tools/nsc/matching/MatchSupport.scala | 157
-rw-r--r--  src/compiler/scala/tools/nsc/matching/Matrix.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala | 50
-rw-r--r--  src/compiler/scala/tools/nsc/matching/ParallelMatching.scala | 36
-rw-r--r--  src/compiler/scala/tools/nsc/matching/Patterns.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/matching/TransMatcher.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/plugins/Plugin.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/plugins/Plugins.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/reporters/Reporter.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala | 40
-rw-r--r--  src/compiler/scala/tools/nsc/settings/AbsSettings.scala | 134
-rw-r--r--  src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala | 76
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ImmutableSettings.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/settings/MutableSettings.scala | 567
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 164
-rw-r--r--  src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala | 54
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/AnnotationCheckers.scala | 36
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/AnnotationInfos.scala | 25
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Definitions.scala | 112
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Flags.scala | 193
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Names.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Positions.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Scopes.scala | 44
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/StdNames.scala | 43
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 29
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolTable.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Symbols.scala | 412
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Types.scala | 673
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileConstants.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 194
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala | 42
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala | 41
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala | 763
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/transform/AddInterfaces.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/transform/CleanUp.scala | 141
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Constructors.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 76
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala | 18
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LambdaLift.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LazyVals.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LiftCode.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/transform/OverridingPairs.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Reifiers.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 129
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TailCalls.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TypingTransformers.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 64
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 94
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/DeVirtualize.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Duplicators.scala | 25
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 75
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 185
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 159
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala | 49
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 210
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala | 37
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 347
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/util/ArgumentsExpander.scala | 43
-rw-r--r--  src/compiler/scala/tools/nsc/util/CharArrayReader.scala | 39
-rwxr-xr-x  src/compiler/scala/tools/nsc/util/Chars.scala | 23
-rw-r--r--  src/compiler/scala/tools/nsc/util/ClassPath.scala | 600
-rw-r--r--  src/compiler/scala/tools/nsc/util/CommandLineParser.scala | 142
-rw-r--r--  src/compiler/scala/tools/nsc/util/CommandLineSpec.scala | 150
-rwxr-xr-x  src/compiler/scala/tools/nsc/util/DocStrings.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/util/MsilClassPath.scala | 169
-rw-r--r--  src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala | 56
-rw-r--r--  src/compiler/scala/tools/nsc/util/ShowPickled.scala | 154
-rw-r--r--  src/compiler/scala/tools/nsc/util/SourceFile.scala | 18
-rw-r--r--  src/compiler/scala/tools/nsc/util/Statistics.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/util/WorkScheduler.scala | 14
-rw-r--r--  src/compiler/scala/tools/util/BashCompletion.scala | 132
-rw-r--r--  src/compiler/scala/tools/util/ClassPathSettings.scala | 32
-rw-r--r--  src/compiler/scala/tools/util/PathResolver.scala | 253
-rw-r--r--  src/compiler/scala/tools/util/StringOps.scala | 23
-rw-r--r--  src/compiler/scala/tools/util/Which.scala | 39
-rw-r--r--  src/continuations/library/scala/util/continuations/ControlContext.scala | 161
-rw-r--r--  src/continuations/library/scala/util/continuations/package.scala | 65
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala | 462
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala | 131
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala | 414
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala | 60
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala | 384
-rw-r--r--  src/continuations/plugin/scalac-plugin.xml | 5
-rw-r--r--  src/dbc/scala/dbc/datatype/Factory.scala | 2
-rw-r--r--  src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java | 36
-rw-r--r--  src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java | 2
-rw-r--r--  src/intellij/actors.iml (renamed from actors.iml) | 4
-rw-r--r--  src/intellij/compiler.iml (renamed from compiler.iml) | 9
-rw-r--r--  src/intellij/dbc.iml (renamed from dbc.iml) | 4
-rw-r--r--  src/intellij/library.iml (renamed from library.iml) | 10
-rw-r--r--  src/intellij/manual.iml (renamed from manual.iml) | 4
-rw-r--r--  src/intellij/partest.iml (renamed from partest.iml) | 6
-rw-r--r--  src/intellij/scala-lang.ipr | 1446
-rw-r--r--  src/intellij/scalap.iml (renamed from scalap.iml) | 4
-rw-r--r--  src/intellij/swing.iml (renamed from swing.iml) | 4
-rw-r--r--  src/library/scala/Application.scala | 3
-rw-r--r--  src/library/scala/Array.scala | 23
-rw-r--r--  src/library/scala/Console.scala | 2
-rw-r--r--  src/library/scala/Enumeration.scala | 94
-rw-r--r--  src/library/scala/Function.scala | 11
-rw-r--r--  src/library/scala/Immutable.scala | 2
-rw-r--r--  src/library/scala/LowPriorityImplicits.scala | 26
-rw-r--r--  src/library/scala/NotDefinedError.scala | 1
-rw-r--r--  src/library/scala/Option.scala | 21
-rw-r--r--  src/library/scala/Predef.scala | 72
-rw-r--r--  src/library/scala/Product.scala | 17
-rw-r--r--  src/library/scala/Tuple2.scala | 5
-rw-r--r--  src/library/scala/Tuple3.scala | 5
-rw-r--r--  src/library/scala/annotation/elidable.scala | 8
-rw-r--r--  src/library/scala/annotation/migration.scala | 28
-rw-r--r--  src/library/scala/collection/BitSetLike.scala | 5
-rw-r--r--  src/library/scala/collection/IndexedSeq.scala | 10
-rw-r--r--  src/library/scala/collection/IndexedSeqLike.scala | 274
-rwxr-xr-x  src/library/scala/collection/IndexedSeqOptimized.scala | 293
-rw-r--r--  src/library/scala/collection/IndexedSeqView.scala | 38
-rw-r--r--  src/library/scala/collection/IndexedSeqViewLike.scala | 113
-rw-r--r--  src/library/scala/collection/IterableLike.scala | 8
-rw-r--r--  src/library/scala/collection/IterableProxyLike.scala | 26
-rw-r--r--  src/library/scala/collection/IterableViewLike.scala | 11
-rw-r--r--  src/library/scala/collection/Iterator.scala | 359
-rw-r--r--  src/library/scala/collection/JavaConversions.scala | 15
-rw-r--r--  src/library/scala/collection/LinearSeq.scala | 12
-rw-r--r--  src/library/scala/collection/LinearSeqLike.scala | 282
-rwxr-xr-x  src/library/scala/collection/LinearSeqOptimized.scala | 301
-rw-r--r--  src/library/scala/collection/MapLike.scala | 54
-rw-r--r--  src/library/scala/collection/MapProxyLike.scala | 5
-rw-r--r--  src/library/scala/collection/SeqLike.scala | 107
-rw-r--r--  src/library/scala/collection/SeqProxyLike.scala | 33
-rw-r--r--  src/library/scala/collection/SeqView.scala | 2
-rw-r--r--  src/library/scala/collection/SeqViewLike.scala | 49
-rw-r--r--  src/library/scala/collection/SetLike.scala | 2
-rw-r--r--  src/library/scala/collection/SortedMap.scala | 4
-rw-r--r--  src/library/scala/collection/Traversable.scala | 3
-rw-r--r--  src/library/scala/collection/TraversableLike.scala | 511
-rw-r--r--  src/library/scala/collection/TraversableOnce.scala | 522
-rw-r--r--  src/library/scala/collection/TraversableProxy.scala | 2
-rw-r--r--  src/library/scala/collection/TraversableProxyLike.scala | 60
-rw-r--r--  src/library/scala/collection/TraversableView.scala | 2
-rw-r--r--  src/library/scala/collection/TraversableViewLike.scala | 60
-rw-r--r--  src/library/scala/collection/generic/Addable.scala | 13
-rw-r--r--  src/library/scala/collection/generic/GenericTraversableTemplate.scala | 4
-rw-r--r--  src/library/scala/collection/generic/Growable.scala | 18
-rw-r--r--  src/library/scala/collection/generic/IterableForwarder.scala | 4
-rw-r--r--  src/library/scala/collection/generic/SeqForwarder.scala | 25
-rw-r--r--  src/library/scala/collection/generic/Shrinkable.scala | 10
-rw-r--r--  src/library/scala/collection/generic/Sorted.scala | 27
-rw-r--r--  src/library/scala/collection/generic/Subtractable.scala | 11
-rw-r--r--  src/library/scala/collection/generic/TraversableFactory.scala | 2
-rw-r--r--  src/library/scala/collection/generic/TraversableForwarder.scala | 38
-rw-r--r--  src/library/scala/collection/generic/TraversableView.scala.1 | 152
-rwxr-xr-x  src/library/scala/collection/immutable/DefaultMap.scala | 53
-rw-r--r--  src/library/scala/collection/immutable/HashMap.scala | 487
-rw-r--r--  src/library/scala/collection/immutable/HashSet.scala | 421
-rw-r--r--  src/library/scala/collection/immutable/IndexedSeq.scala | 7
-rw-r--r--  src/library/scala/collection/immutable/IntMap.scala | 5
-rw-r--r--  src/library/scala/collection/immutable/LinearSeq.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 33
-rw-r--r--  src/library/scala/collection/immutable/ListMap.scala | 2
-rw-r--r--  src/library/scala/collection/immutable/LongMap.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/Map.scala | 20
-rw-r--r--  src/library/scala/collection/immutable/MapLike.scala | 58
-rw-r--r--  src/library/scala/collection/immutable/MapProxy.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/NumericRange.scala | 31
-rw-r--r--  src/library/scala/collection/immutable/PagedSeq.scala | 6
-rw-r--r--  src/library/scala/collection/immutable/Queue.scala | 29
-rw-r--r--  src/library/scala/collection/immutable/Range.scala | 96
-rw-r--r--  src/library/scala/collection/immutable/RedBlack.scala | 3
-rw-r--r--  src/library/scala/collection/immutable/Set.scala | 14
-rw-r--r--  src/library/scala/collection/immutable/SortedMap.scala | 14
-rw-r--r--  src/library/scala/collection/immutable/Stack.scala | 16
-rw-r--r--  src/library/scala/collection/immutable/Stream.scala | 28
-rw-r--r--  src/library/scala/collection/immutable/StringLike.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/TreeSet.scala | 4
-rw-r--r--  src/library/scala/collection/immutable/Vector.scala | 177
-rw-r--r--  src/library/scala/collection/interfaces/MapMethods.scala | 8
-rw-r--r--  src/library/scala/collection/interfaces/SeqMethods.scala | 2
-rw-r--r--  src/library/scala/collection/interfaces/SetMethods.scala | 6
-rw-r--r--  src/library/scala/collection/interfaces/TraversableMethods.scala | 5
-rw-r--r--  src/library/scala/collection/interfaces/TraversableOnceMethods.scala | 69
-rw-r--r--  src/library/scala/collection/mutable/AddingBuilder.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ArrayBuffer.scala | 14
-rw-r--r--  src/library/scala/collection/mutable/ArrayBuilder.scala | 20
-rw-r--r--  src/library/scala/collection/mutable/ArrayLike.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ArrayOps.scala | 6
-rw-r--r--  src/library/scala/collection/mutable/ArraySeq.scala (renamed from src/library/scala/collection/mutable/GenericArray.scala) | 16
-rw-r--r--  src/library/scala/collection/mutable/ArrayStack.scala | 10
-rw-r--r--  src/library/scala/collection/mutable/BufferLike.scala | 137
-rw-r--r--  src/library/scala/collection/mutable/BufferProxy.scala | 11
-rw-r--r--  src/library/scala/collection/mutable/Builder.scala | 5
-rw-r--r--  src/library/scala/collection/mutable/ConcurrentMap.scala | 18
-rw-r--r--  src/library/scala/collection/mutable/DoubleLinkedList.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/FlatHashTable.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/GrowingBuilder.scala | 30
-rw-r--r--  src/library/scala/collection/mutable/HashMap.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/HashSet.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/HashTable.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ImmutableMapAdaptor.scala | 13
-rw-r--r--  src/library/scala/collection/mutable/IndexedSeq.scala | 3
-rwxr-xr-x  src/library/scala/collection/mutable/IndexedSeqOptimized.scala | 21
-rw-r--r--  src/library/scala/collection/mutable/IndexedSeqView.scala | 28
-rw-r--r--  src/library/scala/collection/mutable/LazyBuilder.scala | 5
-rw-r--r--  src/library/scala/collection/mutable/LinearSeq.scala | 5
-rw-r--r--  src/library/scala/collection/mutable/LinkedListLike.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ListBuffer.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/ListMap.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/MapLike.scala | 114
-rw-r--r--  src/library/scala/collection/mutable/MapLikeBase.scala | 37
-rw-r--r--  src/library/scala/collection/mutable/MapProxy.scala | 10
-rw-r--r--  src/library/scala/collection/mutable/MultiMap.scala | 3
-rw-r--r--  src/library/scala/collection/mutable/MutableList.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/OpenHashMap.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/PriorityQueue.scala | 55
-rw-r--r--  src/library/scala/collection/mutable/PriorityQueueProxy.scala | 12
-rw-r--r--  src/library/scala/collection/mutable/Publisher.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/Queue.scala | 6
-rw-r--r--  src/library/scala/collection/mutable/QueueProxy.scala | 13
-rw-r--r--  src/library/scala/collection/mutable/ResizableArray.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/SetBuilder.scala | 15
-rw-r--r--  src/library/scala/collection/mutable/SetLike.scala | 92
-rw-r--r--  src/library/scala/collection/mutable/Stack.scala | 27
-rw-r--r--  src/library/scala/collection/mutable/StackProxy.scala | 25
-rw-r--r--  src/library/scala/collection/mutable/SynchronizedBuffer.scala | 24
-rw-r--r--  src/library/scala/collection/mutable/SynchronizedMap.scala | 12
-rw-r--r--  src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala | 16
-rw-r--r--  src/library/scala/collection/mutable/SynchronizedQueue.scala | 10
-rw-r--r--  src/library/scala/collection/mutable/SynchronizedSet.scala | 18
-rw-r--r--  src/library/scala/collection/mutable/SynchronizedStack.scala | 10
-rw-r--r--  src/library/scala/collection/mutable/WeakHashMap.scala | 11
-rw-r--r--  src/library/scala/collection/mutable/WrappedArray.scala | 7
-rwxr-xr-x  src/library/scala/collection/readme-if-you-want-to-add-something.txt | 50
-rw-r--r--  src/library/scala/compat/Platform.scala | 2
-rw-r--r--  src/library/scala/concurrent/DelayedLazyVal.scala | 10
-rw-r--r--  src/library/scala/math/BigDecimal.scala | 7
-rw-r--r--  src/library/scala/math/BigInt.scala | 2
-rw-r--r--  src/library/scala/math/Numeric.scala | 15
-rw-r--r--  src/library/scala/math/Ordering.scala | 2
-rw-r--r--  src/library/scala/package.scala | 2
-rw-r--r--  src/library/scala/reflect/ClassManifest.scala | 18
-rw-r--r--  src/library/scala/reflect/Code.scala | 2
-rw-r--r--  src/library/scala/reflect/Manifest.scala | 24
-rwxr-xr-x [-rw-r--r--]  src/library/scala/reflect/NameTransformer.scala (renamed from src/library/scala/util/NameTransformer.scala) | 4
-rw-r--r--  src/library/scala/reflect/ScalaSignature.java | 13
-rwxr-xr-x  src/library/scala/reflect/generic/AnnotationInfos.scala | 50
-rw-r--r--  src/library/scala/reflect/generic/ByteCodecs.scala | 209
-rwxr-xr-x [-rw-r--r--]  src/library/scala/reflect/generic/Constants.scala (renamed from src/compiler/scala/tools/nsc/symtab/Constants.scala) | 29
-rwxr-xr-x  src/library/scala/reflect/generic/Flags.scala | 198
-rwxr-xr-x  src/library/scala/reflect/generic/Names.scala | 21
-rwxr-xr-x [-rw-r--r--]  src/library/scala/reflect/generic/PickleBuffer.scala (renamed from src/compiler/scala/tools/nsc/symtab/classfile/PickleBuffer.scala) | 30
-rwxr-xr-x [-rw-r--r--]  src/library/scala/reflect/generic/PickleFormat.scala (renamed from src/compiler/scala/tools/nsc/symtab/classfile/PickleFormat.scala) | 19
-rwxr-xr-x  src/library/scala/reflect/generic/Scopes.scala | 15
-rwxr-xr-x  src/library/scala/reflect/generic/StandardDefinitions.scala | 67
-rwxr-xr-x  src/library/scala/reflect/generic/StdNames.scala | 26
-rwxr-xr-x  src/library/scala/reflect/generic/Symbols.scala | 194
-rwxr-xr-x  src/library/scala/reflect/generic/Trees.scala | 738
-rwxr-xr-x  src/library/scala/reflect/generic/Types.scala | 156
-rwxr-xr-x  src/library/scala/reflect/generic/UnPickler.scala | 775
-rwxr-xr-x  src/library/scala/reflect/generic/Universe.scala | 16
-rw-r--r--  src/library/scala/runtime/BoxesRunTime.java | 103
-rw-r--r--  src/library/scala/runtime/NonLocalReturnControl.scala | 16
-rw-r--r--  src/library/scala/runtime/NonLocalReturnException.scala | 7
-rw-r--r--  src/library/scala/runtime/RichChar.scala | 24
-rw-r--r--  src/library/scala/runtime/ScalaRunTime.scala | 86
-rw-r--r--  src/library/scala/testing/SUnit.scala | 20
-rw-r--r--  src/library/scala/throws.scala | 14
-rw-r--r--  src/library/scala/util/Properties.scala | 107
-rw-r--r--  src/library/scala/util/Random.scala | 14
-rw-r--r--  src/library/scala/util/Sorting.scala | 106
-rw-r--r--  src/library/scala/util/automata/SubsetConstruction.scala | 2
-rw-r--r--  src/library/scala/util/automata/WordBerrySethi.scala | 2
-rw-r--r--  src/library/scala/util/control/Breaks.scala | 6
-rw-r--r--  src/library/scala/util/control/ControlThrowable.scala (renamed from src/library/scala/util/control/ControlException.scala) | 8
-rw-r--r--  src/library/scala/util/logging/ConsoleLogger.scala | 2
-rw-r--r--  src/library/scala/util/matching/Regex.scala | 60
-rw-r--r--  src/library/scala/util/parsing/ast/Binders.scala | 22
-rw-r--r--  src/library/scala/util/parsing/combinator/Parsers.scala | 52
-rw-r--r--  src/library/scala/util/parsing/combinator/lexical/Lexical.scala | 9
-rw-r--r--  src/library/scala/util/parsing/combinator/lexical/Scanners.scala | 16
-rw-r--r--  src/library/scala/util/parsing/combinator/lexical/StdLexical.scala | 9
-rw-r--r--  src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala | 10
-rw-r--r--  src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala | 9
-rw-r--r--  src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala | 16
-rw-r--r--  src/library/scala/util/parsing/combinator/token/StdTokens.scala (renamed from src/library/scala/util/parsing/syntax/StdTokens.scala) | 4
-rw-r--r--  src/library/scala/util/parsing/combinator/token/Tokens.scala (renamed from src/library/scala/util/parsing/syntax/Tokens.scala) | 4
-rw-r--r--  src/library/scala/util/parsing/input/Position.scala | 2
-rw-r--r--  src/library/scala/util/parsing/syntax/package.scala | 19
-rw-r--r--  src/library/scala/xml/Atom.scala | 18
-rw-r--r--  src/library/scala/xml/Attribute.scala | 46
-rw-r--r--  src/library/scala/xml/Comment.scala | 2
-rw-r--r--  src/library/scala/xml/Document.scala | 4
-rw-r--r--  src/library/scala/xml/Elem.scala | 18
-rw-r--r--  src/library/scala/xml/EntityRef.scala | 2
-rw-r--r--  src/library/scala/xml/Equality.scala | 115
-rw-r--r--  src/library/scala/xml/Group.scala | 58
-rw-r--r--  src/library/scala/xml/MetaData.scala | 47
-rw-r--r--  src/library/scala/xml/NamespaceBinding.scala | 13
-rw-r--r--  src/library/scala/xml/Node.scala | 65
-rw-r--r--  src/library/scala/xml/NodeBuffer.scala | 3
-rw-r--r--  src/library/scala/xml/NodeSeq.scala | 38
-rw-r--r--  src/library/scala/xml/Null.scala | 57
-rw-r--r--  src/library/scala/xml/PCData.scala | 7
-rw-r--r--  src/library/scala/xml/PrefixedAttribute.scala | 52
-rw-r--r--  src/library/scala/xml/PrettyPrinter.scala | 5
-rw-r--r--  src/library/scala/xml/ProcInstr.scala | 1
-rw-r--r--  src/library/scala/xml/SpecialNode.scala | 4
-rw-r--r--  src/library/scala/xml/Text.scala | 10
-rw-r--r--  src/library/scala/xml/TextBuffer.scala | 4
-rw-r--r--  src/library/scala/xml/TopScope.scala | 2
-rw-r--r--  src/library/scala/xml/Unparsed.scala | 7
-rw-r--r--  src/library/scala/xml/UnprefixedAttribute.scala | 36
-rw-r--r--  src/library/scala/xml/Utility.scala | 38
-rw-r--r--  src/library/scala/xml/XML.scala | 16
-rw-r--r--  src/library/scala/xml/dtd/ContentModel.scala | 5
-rw-r--r--  src/library/scala/xml/dtd/ContentModelParser.scala | 3
-rw-r--r--  src/library/scala/xml/dtd/DTD.scala | 22
-rw-r--r--  src/library/scala/xml/dtd/Decl.scala | 6
-rw-r--r--  src/library/scala/xml/dtd/DocType.scala | 3
-rw-r--r--  src/library/scala/xml/dtd/ElementValidator.scala | 4
-rw-r--r--  src/library/scala/xml/dtd/ExternalID.scala | 5
-rw-r--r--  src/library/scala/xml/dtd/Scanner.scala | 4
-rw-r--r--  src/library/scala/xml/factory/Binder.scala | 2
-rw-r--r--  src/library/scala/xml/factory/NodeFactory.scala | 4
-rw-r--r--  src/library/scala/xml/factory/XMLLoader.scala | 4
-rw-r--r--  src/library/scala/xml/include/XIncludeException.scala | 2
-rw-r--r--  src/library/scala/xml/include/sax/Main.scala | 7
-rw-r--r--  src/library/scala/xml/include/sax/XIncludeFilter.scala | 93
-rw-r--r--  src/library/scala/xml/include/sax/XIncluder.scala | 24
-rw-r--r--  src/library/scala/xml/package.scala | 18
-rw-r--r--  src/library/scala/xml/parsing/ConstructingParser.scala | 43
-rw-r--r--  src/library/scala/xml/parsing/DefaultMarkupHandler.scala | 2
-rw-r--r--  src/library/scala/xml/parsing/FactoryAdapter.scala | 11
-rw-r--r--  src/library/scala/xml/parsing/FatalError.scala | 7
-rw-r--r--  src/library/scala/xml/parsing/MarkupHandler.scala | 7
-rw-r--r--  src/library/scala/xml/parsing/MarkupParser.scala | 255
-rw-r--r--  src/library/scala/xml/parsing/MarkupParserCommon.scala | 180
-rw-r--r--  src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala | 4
-rw-r--r--  src/library/scala/xml/parsing/TokenTests.scala | 2
-rw-r--r--  src/library/scala/xml/parsing/ValidatingMarkupHandler.scala | 2
-rw-r--r--  src/library/scala/xml/parsing/XhtmlEntities.scala | 3
-rw-r--r--  src/msil/ch/epfl/lamp/compiler/msil/Attribute.java | 2
-rw-r--r--  src/msil/ch/epfl/lamp/compiler/msil/PEFile.java | 16
-rw-r--r--  src/msil/ch/epfl/lamp/compiler/msil/PEModule.java | 13
-rw-r--r--  src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala | 2
-rw-r--r--  src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala | 2
-rw-r--r--  src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala | 4
-rw-r--r--  src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala | 4
-rw-r--r--  src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala | 4
-rw-r--r--  src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala | 2
-rw-r--r--  src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala | 16
-rw-r--r--  src/partest/README | 75
-rw-r--r--  src/partest/scala/tools/partest/Actions.scala | 168
-rw-r--r--  src/partest/scala/tools/partest/Alarms.scala | 85
-rw-r--r--  src/partest/scala/tools/partest/BuildContributors.scala | 102
-rw-r--r--  src/partest/scala/tools/partest/Categories.scala | 69
-rw-r--r--  src/partest/scala/tools/partest/Compilable.scala | 103
-rw-r--r--  src/partest/scala/tools/partest/Config.scala | 130
-rw-r--r--  src/partest/scala/tools/partest/Dispatcher.scala | 161
-rw-r--r--  src/partest/scala/tools/partest/Entities.scala | 77
-rw-r--r--  src/partest/scala/tools/partest/Housekeeping.scala | 187
-rw-r--r--  src/partest/scala/tools/partest/Partest.scala | 72
-rw-r--r--  src/partest/scala/tools/partest/PartestSpec.scala | 108
-rw-r--r--  src/partest/scala/tools/partest/PartestTask.scala | 250
-rw-r--r--  src/partest/scala/tools/partest/Properties.scala (renamed from src/partest/scala/tools/partest/utils/Properties.scala) | 4
-rw-r--r--  src/partest/scala/tools/partest/Results.scala | 113
-rw-r--r--  src/partest/scala/tools/partest/Runner.scala | 39
-rw-r--r--  src/partest/scala/tools/partest/Statistics.scala | 46
-rw-r--r--  src/partest/scala/tools/partest/Universe.scala | 101
-rw-r--r--  src/partest/scala/tools/partest/ant/JavaTask.scala | 55
-rw-r--r--  src/partest/scala/tools/partest/ant/PartestTask.scala | 90
-rw-r--r--  src/partest/scala/tools/partest/antlib.xml | 3
-rw-r--r--  src/partest/scala/tools/partest/category/AllCategories.scala | 20
-rw-r--r--  src/partest/scala/tools/partest/category/Analysis.scala | 65
-rw-r--r--  src/partest/scala/tools/partest/category/Compiler.scala | 142
-rw-r--r--  src/partest/scala/tools/partest/category/Runner.scala | 108
-rw-r--r--  src/partest/scala/tools/partest/io/ANSIWriter.scala | 58
-rw-r--r--  src/partest/scala/tools/partest/io/Diff.java (renamed from src/partest/scala/tools/partest/nest/Diff.java) | 2
-rw-r--r--  src/partest/scala/tools/partest/io/DiffPrint.java (renamed from src/partest/scala/tools/partest/nest/DiffPrint.java) | 2
-rw-r--r--  src/partest/scala/tools/partest/io/JUnitReport.scala | 38
-rw-r--r--  src/partest/scala/tools/partest/io/Logging.scala | 132
-rw-r--r--  src/partest/scala/tools/partest/nest/AntRunner.scala | 32
-rw-r--r--  src/partest/scala/tools/partest/nest/CompileManager.scala | 218
-rw-r--r--  src/partest/scala/tools/partest/nest/ConsoleFileManager.scala | 314
-rw-r--r--  src/partest/scala/tools/partest/nest/ConsoleRunner.scala | 237
-rw-r--r--  src/partest/scala/tools/partest/nest/DirectRunner.scala | 78
-rw-r--r--  src/partest/scala/tools/partest/nest/FileManager.scala | 79
-rw-r--r--  src/partest/scala/tools/partest/nest/NestRunner.scala | 16
-rw-r--r--  src/partest/scala/tools/partest/nest/NestUI.scala | 108
-rw-r--r--  src/partest/scala/tools/partest/nest/ReflectiveRunner.scala | 80
-rw-r--r--  src/partest/scala/tools/partest/nest/RunnerUtils.scala | 42
-rw-r--r--  src/partest/scala/tools/partest/nest/StreamAppender.scala | 90
-rw-r--r--  src/partest/scala/tools/partest/nest/TestFile.scala | 109
-rw-r--r--  src/partest/scala/tools/partest/nest/Worker.scala | 1000
-rw-r--r--  src/partest/scala/tools/partest/package.scala | 47
-rw-r--r--  src/partest/scala/tools/partest/util/package.scala | 61
-rw-r--r--  src/partest/scala/tools/partest/utils/PrintMgr.scala | 52
-rw-r--r--  src/scalap/scala/tools/scalap/Classfile.scala | 1
-rw-r--r--  src/scalap/scala/tools/scalap/Decode.scala | 43
-rw-r--r--  src/scalap/scala/tools/scalap/Main.scala | 58
-rw-r--r--  src/scalap/scala/tools/scalap/Properties.scala | 1
-rw-r--r--  src/scalap/scala/tools/scalap/scalax/rules/Functors.scala | 4
-rw-r--r--  src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala | 2
-rw-r--r--  src/scalap/scala/tools/scalap/scalax/rules/Result.scala | 10
-rw-r--r--  src/scalap/scala/tools/scalap/scalax/rules/Rules.scala | 2
-rw-r--r--  src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala | 4
-rw-r--r--  src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala | 57
-rw-r--r--  src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala | 11
-rw-r--r--  src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala | 252
-rw-r--r--  src/swing/scala/swing/AbstractButton.scala | 10
-rw-r--r--  src/swing/scala/swing/Action.scala | 16
-rw-r--r--  src/swing/scala/swing/BorderPanel.scala | 8
-rw-r--r--  src/swing/scala/swing/BufferWrapper.scala | 1
-rw-r--r--  src/swing/scala/swing/ComboBox.scala | 5
-rw-r--r--  src/swing/scala/swing/Component.scala | 9
-rw-r--r--  src/swing/scala/swing/Font.scala | 70
-rw-r--r--  src/swing/scala/swing/LayoutContainer.scala | 9
-rw-r--r--  src/swing/scala/swing/ListView.scala | 4
-rw-r--r--  src/swing/scala/swing/Publisher.scala | 8
-rw-r--r--  src/swing/scala/swing/RichWindow.scala | 9
-rw-r--r--  src/swing/scala/swing/RootPanel.scala | 12
-rw-r--r--  src/swing/scala/swing/ScrollPane.scala | 12
-rw-r--r--  src/swing/scala/swing/SimpleGUIApplication.scala | 6
-rw-r--r--  src/swing/scala/swing/SimpleSwingApplication.scala | 4
-rw-r--r--  src/swing/scala/swing/Slider.scala | 9
-rw-r--r--  src/swing/scala/swing/Swing.scala | 3
-rw-r--r--  src/swing/scala/swing/Table.scala | 6
-rw-r--r--  src/swing/scala/swing/TextComponent.scala | 1
-rw-r--r--  src/swing/scala/swing/TextField.scala | 13
-rw-r--r--  src/swing/scala/swing/UIElement.scala | 8
-rw-r--r--  src/swing/scala/swing/Window.scala | 6
-rw-r--r--  src/swing/scala/swing/event/TableEvent.scala | 2
-rw-r--r--  src/swing/scala/swing/package.scala | 78
-rw-r--r--  src/swing/scala/swing/test/SimpleApplet.scala | 3
-rw-r--r--  test/attic/files/cli/test1/Main.check.j9vm5 (renamed from test/files/cli/test1/Main.check.j9vm5) | 0
-rw-r--r--  test/attic/files/cli/test1/Main.check.java (renamed from test/files/cli/test1/Main.check.java) | 0
-rw-r--r--  test/attic/files/cli/test1/Main.check.java5 (renamed from test/files/cli/test1/Main.check.java5) | 0
-rw-r--r--  test/attic/files/cli/test1/Main.check.java5_api (renamed from test/files/cli/test1/Main.check.java5_api) | 0
-rw-r--r--  test/attic/files/cli/test1/Main.check.java5_j9 (renamed from test/files/cli/test1/Main.check.java5_j9) | 0
-rw-r--r--  test/attic/files/cli/test1/Main.check.javac (renamed from test/files/cli/test1/Main.check.javac) | 0
-rw-r--r--  test/attic/files/cli/test1/Main.check.javac5 (renamed from test/files/cli/test1/Main.check.javac5) | 0
-rw-r--r--  test/attic/files/cli/test1/Main.check.javac6 (renamed from test/files/cli/test1/Main.check.javac6) | 0
-rw-r--r--  test/attic/files/cli/test1/Main.check.jikes (renamed from test/files/cli/test1/Main.check.jikes) | 0
-rw-r--r--  test/attic/files/cli/test1/Main.check.jikes5 (renamed from test/files/cli/test1/Main.check.jikes5) | 0
-rw-r--r--  test/attic/files/cli/test1/Main.check.scala (renamed from test/files/cli/test1/Main.check.scala) | 0
-rw-r--r--  test/attic/files/cli/test1/Main.check.scala_api (renamed from test/files/cli/test1/Main.check.scala_api) | 0
-rw-r--r--  test/attic/files/cli/test1/Main.check.scala_j9 (renamed from test/files/cli/test1/Main.check.scala_j9) | 0
-rw-r--r--  test/attic/files/cli/test1/Main.check.scalac (renamed from test/files/cli/test1/Main.check.scalac) | 0
-rw-r--r--  test/attic/files/cli/test1/Main.check.scalaint (renamed from test/files/cli/test1/Main.check.scalaint) | 0
-rw-r--r--  test/attic/files/cli/test1/Main.java (renamed from test/files/cli/test1/Main.java) | 0
-rw-r--r--  test/attic/files/cli/test1/Main.scala (renamed from test/files/cli/test1/Main.scala) | 0
-rw-r--r--  test/attic/files/cli/test2/Main.check.j9vm5 (renamed from test/files/cli/test2/Main.check.j9vm5) | 0
-rw-r--r--  test/attic/files/cli/test2/Main.check.java (renamed from test/files/cli/test2/Main.check.java) | 0
-rw-r--r--  test/attic/files/cli/test2/Main.check.java5 (renamed from test/files/cli/test2/Main.check.java5) | 0
-rw-r--r--  test/attic/files/cli/test2/Main.check.java5_api (renamed from test/files/cli/test2/Main.check.java5_api) | 0
-rw-r--r--  test/attic/files/cli/test2/Main.check.java5_j9 (renamed from test/files/cli/test2/Main.check.java5_j9) | 0
-rw-r--r--  test/attic/files/cli/test2/Main.check.javac (renamed from test/files/cli/test2/Main.check.javac) | 0
-rw-r--r--  test/attic/files/cli/test2/Main.check.javac5 (renamed from test/files/cli/test2/Main.check.javac5) | 0
-rw-r--r--  test/attic/files/cli/test2/Main.check.javac6 (renamed from test/files/cli/test2/Main.check.javac6) | 0
-rw-r--r--  test/attic/files/cli/test2/Main.check.jikes (renamed from test/files/cli/test2/Main.check.jikes) | 0
-rw-r--r--  test/attic/files/cli/test2/Main.check.jikes5 (renamed from test/files/cli/test2/Main.check.jikes5) | 0
-rw-r--r--  test/attic/files/cli/test2/Main.check.scala (renamed from test/files/cli/test2/Main.check.scala) | 0
-rw-r--r--  test/attic/files/cli/test2/Main.check.scala_api (renamed from test/files/cli/test2/Main.check.scala_api) | 0
-rw-r--r--  test/attic/files/cli/test2/Main.check.scala_j9 (renamed from test/files/cli/test2/Main.check.scala_j9) | 0
-rw-r--r--  test/attic/files/cli/test2/Main.check.scalac (renamed from test/files/cli/test2/Main.check.scalac) | 0
-rw-r--r--  test/attic/files/cli/test2/Main.check.scalaint (renamed from test/files/cli/test2/Main.check.scalaint) | 0
-rw-r--r--  test/attic/files/cli/test2/Main.java (renamed from test/files/cli/test2/Main.java) | 0
-rw-r--r--  test/attic/files/cli/test2/Main.scala (renamed from test/files/cli/test2/Main.scala) | 0
-rw-r--r--  test/attic/files/cli/test3/Main.check.j9vm5 (renamed from test/files/cli/test3/Main.check.j9vm5) | 0
-rw-r--r--  test/attic/files/cli/test3/Main.check.java (renamed from test/files/cli/test3/Main.check.java) | 0
-rw-r--r--  test/attic/files/cli/test3/Main.check.java5 (renamed from test/files/cli/test3/Main.check.java5) | 0
-rw-r--r--  test/attic/files/cli/test3/Main.check.java5_api (renamed from test/files/cli/test3/Main.check.java5_api) | 0
-rw-r--r--  test/attic/files/cli/test3/Main.check.java5_j9 (renamed from test/files/cli/test3/Main.check.java5_j9) | 0
-rw-r--r--  test/attic/files/cli/test3/Main.check.javac (renamed from test/files/cli/test3/Main.check.javac) | 0
-rw-r--r--  test/attic/files/cli/test3/Main.check.javac5 (renamed from test/files/cli/test3/Main.check.javac5) | 0
-rw-r--r--  test/attic/files/cli/test3/Main.check.javac6 (renamed from test/files/cli/test3/Main.check.javac6) | 0
-rw-r--r--  test/attic/files/cli/test3/Main.check.jikes (renamed from test/files/cli/test3/Main.check.jikes) | 0
-rw-r--r--  test/attic/files/cli/test3/Main.check.jikes5 (renamed from test/files/cli/test3/Main.check.jikes5) | 0
-rw-r--r--  test/attic/files/cli/test3/Main.check.scala (renamed from test/files/cli/test3/Main.check.scala) | 0
-rw-r--r--  test/attic/files/cli/test3/Main.check.scala_api (renamed from test/files/cli/test3/Main.check.scala_api) | 0
-rw-r--r--  test/attic/files/cli/test3/Main.check.scala_j9 (renamed from test/files/cli/test3/Main.check.scala_j9) | 0
-rw-r--r--  test/attic/files/cli/test3/Main.check.scalac (renamed from test/files/cli/test3/Main.check.scalac) | 0
-rw-r--r--  test/attic/files/cli/test3/Main.check.scalaint (renamed from test/files/cli/test3/Main.check.scalaint) | 0
-rw-r--r--  test/attic/files/cli/test3/Main.java (renamed from test/files/cli/test3/Main.java) | 0
-rw-r--r--  test/attic/files/cli/test3/Main.scala (renamed from test/files/cli/test3/Main.scala) | 0
-rw-r--r--  test/continuations/neg/function0.check | 6
-rw-r--r--  test/continuations/neg/function0.scala | 16
-rw-r--r--  test/continuations/neg/function2.check | 6
-rw-r--r--  test/continuations/neg/function2.scala | 16
-rw-r--r--  test/continuations/neg/function3.check | 6
-rw-r--r--  test/continuations/neg/function3.scala | 15
-rw-r--r--  test/continuations/neg/infer0.check | 4
-rw-r--r--  test/continuations/neg/infer0.scala | 14
-rw-r--r--  test/continuations/neg/infer2.check | 4
-rw-r--r--  test/continuations/neg/infer2.scala | 19
-rw-r--r--  test/continuations/neg/lazy.check | 6
-rw-r--r--  test/continuations/neg/lazy.scala | 16
-rw-r--r--  test/continuations/neg/t1929.check | 6
-rw-r--r--  test/continuations/neg/t1929.scala | 17
-rw-r--r--  test/continuations/neg/t2285.check | 6
-rw-r--r--  test/continuations/neg/t2285.scala | 11
-rw-r--r--  test/continuations/neg/t2949.check | 6
-rw-r--r--  test/continuations/neg/t2949.scala | 15
-rw-r--r--  test/continuations/neg/trycatch2.check | 7
-rw-r--r--  test/continuations/neg/trycatch2.scala | 33
-rwxr-xr-x  test/continuations/run/basics.check | 2
-rwxr-xr-x  test/continuations/run/basics.scala | 23
-rw-r--r--  test/continuations/run/function1.check | 1
-rw-r--r--  test/continuations/run/function1.scala | 16
-rw-r--r--  test/continuations/run/function4.check | 1
-rw-r--r--  test/continuations/run/function4.scala | 15
-rw-r--r--  test/continuations/run/function5.check | 1
-rw-r--r--  test/continuations/run/function5.scala | 15
-rw-r--r--  test/continuations/run/function6.check | 1
-rw-r--r--  test/continuations/run/function6.scala | 16
-rw-r--r--  test/continuations/run/ifelse0.check | 2
-rw-r--r--  test/continuations/run/ifelse0.scala | 18
-rw-r--r--  test/continuations/run/ifelse1.check | 4
-rw-r--r--  test/continuations/run/ifelse1.scala | 25
-rw-r--r--  test/continuations/run/ifelse2.check | 4
-rw-r--r--  test/continuations/run/ifelse2.scala | 16
-rw-r--r--  test/continuations/run/ifelse3.check | 2
-rw-r--r--  test/continuations/run/ifelse3.scala | 21
-rw-r--r--  test/continuations/run/infer1.scala | 33
-rw-r--r--  test/continuations/run/match0.check | 2
-rw-r--r--  test/continuations/run/match0.scala | 18
-rw-r--r--  test/continuations/run/match1.check | 2
-rw-r--r--  test/continuations/run/match1.scala | 18
-rw-r--r--  test/continuations/run/match2.check | 2
-rw-r--r--  test/continuations/run/match2.scala | 26
-rw-r--r--  test/continuations/run/t1807.check | 1
-rw-r--r--  test/continuations/run/t1807.scala | 14
-rw-r--r--  test/continuations/run/t1808.scala | 10
-rw-r--r--  test/continuations/run/t1820.scala | 14
-rw-r--r--  test/continuations/run/t1821.check | 4
-rw-r--r--  test/continuations/run/t1821.scala | 20
-rw-r--r--  test/continuations/run/t2864.check | 1
-rw-r--r--  test/continuations/run/t2864.scala | 30
-rw-r--r--  test/continuations/run/t2934.check | 1
-rw-r--r--  test/continuations/run/t2934.scala | 10
-rw-r--r--  test/continuations/run/t3199.check | 1
-rw-r--r--  test/continuations/run/t3199.scala | 20
-rw-r--r--  test/continuations/run/t3199b.check | 1
-rw-r--r--  test/continuations/run/t3199b.scala | 11
-rw-r--r--  test/continuations/run/t3223.check | 1
-rw-r--r--  test/continuations/run/t3223.scala | 19
-rw-r--r--  test/continuations/run/t3225.check | 12
-rw-r--r--  test/continuations/run/t3225.scala | 56
-rw-r--r--  test/continuations/run/trycatch0.check | 2
-rw-r--r--  test/continuations/run/trycatch0.scala | 25
-rw-r--r--  test/continuations/run/trycatch1.check | 4
-rw-r--r--  test/continuations/run/trycatch1.scala | 48
-rw-r--r--  test/continuations/run/while0.check | 1
-rw-r--r--  test/continuations/run/while0.scala | 22
-rw-r--r--  test/continuations/run/while1.check | 11
-rw-r--r--  test/continuations/run/while1.scala | 22
-rw-r--r--  test/continuations/run/while2.check | 19
-rw-r--r--  test/continuations/run/while2.scala | 23
-rw-r--r--  test/debug/buildmanager/.gitignore | 0
-rw-r--r--  test/debug/jvm/.gitignore | 0
-rw-r--r--  test/debug/neg/.gitignore | 0
-rw-r--r--  test/debug/pos/.gitignore | 0
-rw-r--r--  test/debug/res/.gitignore | 0
-rw-r--r--  test/debug/run/.gitignore | 0
-rw-r--r--  test/debug/scalacheck/.gitignore | 0
-rw-r--r--  test/debug/scalap/.gitignore | 0
-rw-r--r--  test/debug/shootout/.gitignore | 0
-rw-r--r--  test/disabled-windows/script/loadAndExecute.check (renamed from test/files/script/loadAndExecute/loadAndExecute.check) | 0
-rwxr-xr-x  test/disabled-windows/script/loadAndExecute/lAndE1.scala (renamed from test/files/script/loadAndExecute/lAndE1.scala) | 0
-rwxr-xr-x  test/disabled-windows/script/loadAndExecute/lAndE2.scala (renamed from test/files/script/loadAndExecute/lAndE2.scala) | 0
-rwxr-xr-x  test/disabled-windows/script/loadAndExecute/loadAndExecute.scala (renamed from test/files/script/loadAndExecute/loadAndExecute.scala) | 0
-rwxr-xr-x  test/disabled-windows/script/utf8.bat (renamed from test/files/script/utf8.bat) | 0
-rw-r--r--  test/disabled-windows/script/utf8.check (renamed from test/files/script/utf8.check) | 0
-rwxr-xr-x  test/disabled-windows/script/utf8.scala (renamed from test/files/script/utf8.scala) | 7
-rw-r--r--  test/disabled/buildmanager/t2651_1/A.scala | 1
-rw-r--r--  test/disabled/buildmanager/t2651_1/B.scala | 2
-rw-r--r--  test/disabled/buildmanager/t2651_1/C.scala | 3
-rw-r--r--  test/disabled/buildmanager/t2651_1/D.scala | 3
-rw-r--r--  test/disabled/buildmanager/t2651_1/t2651_1.changes/A2.scala | 2
-rw-r--r--  test/disabled/buildmanager/t2651_1/t2651_1.check | 19
-rw-r--r--  test/disabled/buildmanager/t2651_1/t2651_1.test | 3
-rw-r--r--  test/disabled/jvm/JavaInteraction.check (renamed from test/files/jvm/JavaInteraction.check) | 0
-rw-r--r--  test/disabled/jvm/JavaInteraction.scala (renamed from test/files/jvm/JavaInteraction.scala) | 0
-rw-r--r--  test/disabled/pos/t1053.scala (renamed from test/pending/pos/t1053.scala) | 0
-rw-r--r--  test/disabled/pos/t2619.scala (renamed from test/pending/pos/t2619.scala) | 0
-rw-r--r--  test/disabled/pos/ticket2251.scala (renamed from test/pending/pos/ticket2251.scala) | 0
-rw-r--r--  test/disabled/run/docgenerator.scala | 4
-rw-r--r-- [-rwxr-xr-x]  test/files/bench/equality/eqeq.eqlog (renamed from test/files/bench/equality/eqeq.log) | 0
-rw-r--r--  test/files/buildmanager/annotated/A.scala | 1
-rw-r--r--  test/files/buildmanager/annotated/annotated.check | 6
-rw-r--r--  test/files/buildmanager/annotated/annotated.test | 2
-rw-r--r--  test/files/buildmanager/freshnames/A.scala | 16
-rw-r--r--  test/files/buildmanager/freshnames/B.scala | 4
-rw-r--r--  test/files/buildmanager/freshnames/freshnames.check | 6
-rw-r--r--  test/files/buildmanager/freshnames/freshnames.test | 2
-rw-r--r--  test/files/buildmanager/infer/A.scala | 16
-rw-r--r--  test/files/buildmanager/infer/infer.check | 6
-rw-r--r--  test/files/buildmanager/infer/infer.test | 2
-rw-r--r--  test/files/buildmanager/overloaded_1/A.scala | 11
-rw-r--r--  test/files/buildmanager/overloaded_1/overloaded_1.check | 6
-rw-r--r--  test/files/buildmanager/overloaded_1/overloaded_1.test | 2
-rw-r--r--  test/files/buildmanager/simpletest/A.scala | 3
-rw-r--r--  test/files/buildmanager/simpletest/B.scala | 3
-rw-r--r--  test/files/buildmanager/simpletest/simpletest.changes/A1.scala | 1
-rw-r--r--  test/files/buildmanager/simpletest/simpletest.check | 11
-rw-r--r--  test/files/buildmanager/simpletest/simpletest.test | 3
-rw-r--r--  test/files/buildmanager/t2280/A.scala | 1
-rw-r--r--  test/files/buildmanager/t2280/B.java | 2
-rw-r--r--  test/files/buildmanager/t2280/t2280.check | 6
-rw-r--r--  test/files/buildmanager/t2280/t2280.test | 2
-rw-r--r--  test/files/buildmanager/t2556_1/A.scala | 3
-rw-r--r--  test/files/buildmanager/t2556_1/B.scala | 3
-rw-r--r--  test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala | 4
-rw-r--r--  test/files/buildmanager/t2556_1/t2556_1.check | 12
-rw-r--r--  test/files/buildmanager/t2556_1/t2556_1.test | 3
-rw-r--r--  test/files/buildmanager/t2556_2/A.scala | 4
-rw-r--r--  test/files/buildmanager/t2556_2/B.scala | 2
-rw-r--r--  test/files/buildmanager/t2556_2/C.scala | 4
-rw-r--r--  test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala | 4
-rw-r--r--  test/files/buildmanager/t2556_2/t2556_2.check | 13
-rw-r--r--  test/files/buildmanager/t2556_2/t2556_2.test | 3
-rw-r--r--  test/files/buildmanager/t2556_3/A.scala | 5
-rw-r--r--  test/files/buildmanager/t2556_3/B.scala | 5
-rw-r--r--  test/files/buildmanager/t2556_3/C.scala | 2
-rw-r--r--  test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala | 5
-rw-r--r--  test/files/buildmanager/t2556_3/t2556_3.check | 18
-rw-r--r--  test/files/buildmanager/t2556_3/t2556_3.test | 3
-rw-r--r--  test/files/buildmanager/t2557/A.scala | 4
-rw-r--r--  test/files/buildmanager/t2557/B.scala | 4
-rw-r--r--  test/files/buildmanager/t2557/C.scala | 3
-rw-r--r--  test/files/buildmanager/t2557/D.scala | 1
-rw-r--r--  test/files/buildmanager/t2557/E.scala | 1
-rw-r--r--  test/files/buildmanager/t2557/F.scala | 4
-rw-r--r--  test/files/buildmanager/t2557/t2557.changes/D2.scala | 2
-rw-r--r--  test/files/buildmanager/t2557/t2557.check | 10
-rw-r--r--  test/files/buildmanager/t2557/t2557.test | 3
-rw-r--r--  test/files/buildmanager/t2559/A.scala | 5
-rw-r--r--  test/files/buildmanager/t2559/D.scala | 8
-rw-r--r--  test/files/buildmanager/t2559/t2559.changes/A2.scala | 5
-rw-r--r--  test/files/buildmanager/t2559/t2559.check | 14
-rw-r--r--  test/files/buildmanager/t2559/t2559.test | 3
-rw-r--r--  test/files/buildmanager/t2562/A.scala | 7
-rw-r--r--  test/files/buildmanager/t2562/B.scala | 8
-rw-r--r--  test/files/buildmanager/t2562/t2562.changes/A2.scala | 8
-rw-r--r--  test/files/buildmanager/t2562/t2562.check | 12
-rw-r--r--  test/files/buildmanager/t2562/t2562.test | 3
-rw-r--r--  test/files/buildmanager/t2649/A.scala | 3
-rw-r--r--  test/files/buildmanager/t2649/B.scala | 4
-rw-r--r--  test/files/buildmanager/t2649/t2649.changes/A2.scala | 4
-rw-r--r--  test/files/buildmanager/t2649/t2649.check | 9
-rw-r--r--  test/files/buildmanager/t2649/t2649.test | 3
-rw-r--r--  test/files/buildmanager/t2650_1/A.scala | 4
-rw-r--r--  test/files/buildmanager/t2650_1/B.scala | 3
-rw-r--r--  test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala | 3
-rw-r--r--  test/files/buildmanager/t2650_1/t2650_1.check | 11
-rw-r--r--  test/files/buildmanager/t2650_1/t2650_1.test | 3
-rw-r--r--  test/files/buildmanager/t2650_2/A.scala | 3
-rw-r--r--  test/files/buildmanager/t2650_2/B.scala | 4
-rw-r--r--  test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala | 4
-rw-r--r--  test/files/buildmanager/t2650_2/t2650_2.check | 13
-rw-r--r--  test/files/buildmanager/t2650_2/t2650_2.test | 3
-rw-r--r--  test/files/buildmanager/t2650_3/A.scala | 4
-rw-r--r--  test/files/buildmanager/t2650_3/B.scala | 3
-rw-r--r--  test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala | 4
-rw-r--r--  test/files/buildmanager/t2650_3/t2650_3.check | 13
-rw-r--r--  test/files/buildmanager/t2650_3/t2650_3.test | 3
-rw-r--r--  test/files/buildmanager/t2650_4/A.scala | 5
-rw-r--r--  test/files/buildmanager/t2650_4/B.scala | 3
-rw-r--r--  test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala | 5
-rw-r--r--  test/files/buildmanager/t2650_4/t2650_4.check | 13
-rw-r--r--  test/files/buildmanager/t2650_4/t2650_4.test | 3
-rw-r--r--  test/files/buildmanager/t2651_2/A.scala | 1
-rw-r--r--  test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala | 1
-rw-r--r--  test/files/buildmanager/t2651_2/t2651_2.check | 6
-rw-r--r--  test/files/buildmanager/t2651_2/t2651_2.test | 3
-rw-r--r--  test/files/buildmanager/t2651_3/A.scala | 3
-rw-r--r--  test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala | 3
-rw-r--r--  test/files/buildmanager/t2651_3/t2651_3.check | 6
-rw-r--r--  test/files/buildmanager/t2651_3/t2651_3.test | 3
-rw-r--r--  test/files/buildmanager/t2651_4/A.scala | 5
-rw-r--r--  test/files/buildmanager/t2651_4/B.scala | 3
-rw-r--r--  test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala | 5
-rw-r--r--  test/files/buildmanager/t2651_4/t2651_4.check | 13
-rw-r--r--  test/files/buildmanager/t2651_4/t2651_4.test | 3
-rw-r--r--  test/files/buildmanager/t2652/A.scala | 3
-rw-r--r--  test/files/buildmanager/t2652/B.scala | 4
-rw-r--r--  test/files/buildmanager/t2652/t2652.changes/A2.scala | 4
-rw-r--r--  test/files/buildmanager/t2652/t2652.check | 9
-rw-r--r--  test/files/buildmanager/t2652/t2652.test | 3
-rw-r--r--  test/files/buildmanager/t2653/A.scala | 2
-rw-r--r--  test/files/buildmanager/t2653/B.scala | 3
-rw-r--r--  test/files/buildmanager/t2653/t2653.changes/A2.scala | 2
-rw-r--r--  test/files/buildmanager/t2653/t2653.check | 13
-rw-r--r--  test/files/buildmanager/t2653/t2653.test | 3
-rw-r--r--  test/files/buildmanager/t2654/A.scala | 2
-rw-r--r--  test/files/buildmanager/t2654/B.scala | 1
-rw-r--r--  test/files/buildmanager/t2654/t2654.changes/A2.scala | 4
-rw-r--r--  test/files/buildmanager/t2654/t2654.check | 6
-rw-r--r--  test/files/buildmanager/t2654/t2654.test | 3
-rw-r--r--  test/files/buildmanager/t2655/A.scala | 4
-rw-r--r--  test/files/buildmanager/t2655/B.scala | 3
-rw-r--r--  test/files/buildmanager/t2655/t2655.changes/A2.scala | 4
-rw-r--r--  test/files/buildmanager/t2655/t2655.check | 13
-rw-r--r--  test/files/buildmanager/t2655/t2655.test | 3
-rw-r--r--  test/files/buildmanager/t2657/A.scala | 3
-rw-r--r--  test/files/buildmanager/t2657/B.scala | 4
-rw-r--r--  test/files/buildmanager/t2657/t2657.changes/A2.scala | 3
-rw-r--r--  test/files/buildmanager/t2657/t2657.check | 13
-rw-r--r--  test/files/buildmanager/t2657/t2657.test | 3
-rw-r--r--test/files/buildmanager/t2789/A.scala5
-rw-r--r--test/files/buildmanager/t2789/B.scala3
-rw-r--r--test/files/buildmanager/t2789/t2789.changes/A2.scala5
-rw-r--r--test/files/buildmanager/t2789/t2789.check11
-rw-r--r--test/files/buildmanager/t2789/t2789.test3
-rw-r--r--test/files/buildmanager/t2790/A.scala5
-rw-r--r--test/files/buildmanager/t2790/B.scala4
-rw-r--r--test/files/buildmanager/t2790/t2790.changes/A2.scala4
-rw-r--r--test/files/buildmanager/t2790/t2790.check14
-rw-r--r--test/files/buildmanager/t2790/t2790.test3
-rw-r--r--test/files/buildmanager/t3045/A.java7
-rw-r--r--test/files/buildmanager/t3045/t3045.check3
-rw-r--r--test/files/buildmanager/t3045/t3045.test1
-rw-r--r--test/files/buildmanager/t3054/bar/Bar.java7
-rw-r--r--test/files/buildmanager/t3054/foo/Foo.scala5
-rw-r--r--test/files/buildmanager/t3054/t3054.check3
-rw-r--r--test/files/buildmanager/t3054/t3054.test1
-rw-r--r--test/files/buildmanager/t3133/A.java7
-rw-r--r--test/files/buildmanager/t3133/t3133.check3
-rw-r--r--test/files/buildmanager/t3133/t3133.test1
-rw-r--r--test/files/files.iml17
-rw-r--r--test/files/jvm/actor-exceptions.check14
-rw-r--r--test/files/jvm/actor-exceptions.scala20
-rw-r--r--test/files/jvm/actor-executor.check20
-rw-r--r--test/files/jvm/actor-executor.scala65
-rw-r--r--test/files/jvm/actor-executor2.check21
-rw-r--r--test/files/jvm/actor-executor2.scala67
-rw-r--r--test/files/jvm/actor-executor3.check20
-rw-r--r--test/files/jvm/actor-executor3.scala52
-rw-r--r--test/files/jvm/actor-getstate.check2
-rw-r--r--test/files/jvm/actor-getstate.scala85
-rw-r--r--test/files/jvm/actor-link-getstate.check5
-rw-r--r--test/files/jvm/actor-link-getstate.scala47
-rw-r--r--test/files/jvm/actor-receivewithin.scala2
-rw-r--r--test/files/jvm/actor-uncaught-exception.check5
-rw-r--r--test/files/jvm/actor-uncaught-exception.scala46
-rw-r--r--test/files/jvm/deprecation.cmds3
-rw-r--r--test/files/jvm/future-alarm.check20
-rw-r--r--test/files/jvm/future-alarm.scala16
-rw-r--r--test/files/jvm/genericNest/genericNest.jar.desired.sha1 (renamed from test/files/lib/genericNest.jar.desired.sha1)0
-rw-r--r--test/files/jvm/genericNest/genericNest.scala (renamed from test/files/jvm/genericNest.scala)0
-rw-r--r--test/files/jvm/inner.scala4
-rw-r--r--test/files/jvm/interpreter.check12
-rw-r--r--test/files/jvm/lib/annotations.jar.desired.sha1 (renamed from test/files/lib/annotations.jar.desired.sha1)0
-rw-r--r--test/files/jvm/lib/nest.jar.desired.sha1 (renamed from test/files/lib/nest.jar.desired.sha1)0
-rw-r--r--test/files/jvm/methvsfield/methvsfield.jar.desired.sha1 (renamed from test/files/lib/methvsfield.jar.desired.sha1)0
-rw-r--r--test/files/jvm/methvsfield/methvsfield.java (renamed from test/files/jvm/methvsfield.java)0
-rw-r--r--test/files/jvm/methvsfield/methvsfield.scala (renamed from test/files/jvm/methvsfield.scala)0
-rw-r--r--test/files/jvm/nest/nest.java (renamed from test/files/jvm/nest.java)0
-rw-r--r--test/files/jvm/nest/nest.scala (renamed from test/files/jvm/nest.scala)0
-rw-r--r--test/files/jvm/outerEnum/enums.jar.desired.sha1 (renamed from test/files/lib/enums.jar.desired.sha1)0
-rw-r--r--test/files/jvm/outerEnum/outerEnum.scala (renamed from test/files/jvm/outerEnum.scala)0
-rw-r--r--test/files/jvm/reactor-exceptionOnSend.scala4
-rw-r--r--test/files/jvm/reactor-producer-consumer.scala10
-rw-r--r--test/files/jvm/reactor.scala9
-rw-r--r--test/files/jvm/replyablereactor.scala5
-rw-r--r--test/files/jvm/replyablereactor2.scala2
-rw-r--r--test/files/jvm/replyablereactor3.scala2
-rw-r--r--test/files/jvm/replyablereactor4.scala2
-rw-r--r--test/files/jvm/replyreactor.scala6
-rw-r--r--test/files/jvm/serialization.check8
-rw-r--r--test/files/jvm/t1652.check2
-rw-r--r--test/files/jvm/t2470.cmds3
-rw-r--r--test/files/jvm/t2827.check3
-rw-r--r--test/files/jvm/t2827.scala14
-rw-r--r--test/files/jvm/t3003.check1
-rw-r--r--test/files/jvm/t3003.cmds2
-rw-r--r--test/files/jvm/t3003/Annot.java4
-rw-r--r--test/files/jvm/t3003/Test_1.scala8
-rw-r--r--test/files/jvm/t3102.check2
-rw-r--r--test/files/jvm/t3102.scala26
-rw-r--r--test/files/jvm/unittest_io.scala2
-rw-r--r--test/files/jvm/xml01.scala40
-rw-r--r--test/files/jvm/xml02.scala16
-rw-r--r--test/files/jvm/xmlstuff.scala8
-rw-r--r--test/files/neg/bug1279a.check6
-rw-r--r--test/files/neg/bug1878.check7
-rw-r--r--test/files/neg/bug2148.check4
-rw-r--r--test/files/neg/bug2148.scala10
-rw-r--r--test/files/neg/bug3123.check4
-rw-r--r--test/files/neg/bug3123.scala5
-rw-r--r--test/files/neg/bug414.check5
-rw-r--r--test/files/neg/bug961.check5
-rw-r--r--test/files/neg/bug961.scala6
-rw-r--r--test/files/neg/migration28.check9
-rw-r--r--test/files/neg/migration28.flags1
-rw-r--r--test/files/neg/migration28.scala12
-rw-r--r--test/files/neg/multi-array.flags1
-rw-r--r--test/files/neg/patmat-type-check.check21
-rw-r--r--test/files/neg/patmat-type-check.scala28
-rw-r--r--test/files/neg/patmatexhaust.check2
-rw-r--r--test/files/neg/t0528neg.scala2
-rw-r--r--test/files/neg/t0851.check9
-rw-r--r--test/files/neg/t0851.scala25
-rw-r--r--test/files/neg/t2179.check4
-rw-r--r--test/files/neg/t2386.check4
-rw-r--r--test/files/neg/t2386.scala3
-rw-r--r--test/files/neg/t2918.check7
-rwxr-xr-xtest/files/neg/t2918.scala3
-rw-r--r--test/files/neg/t3006.check6
-rwxr-xr-xtest/files/neg/t3006.scala10
-rw-r--r--test/files/neg/t3015.check11
-rw-r--r--test/files/neg/t3015.scala8
-rw-r--r--test/files/neg/t3115.check10
-rw-r--r--test/files/neg/t3115.flags1
-rwxr-xr-xtest/files/neg/t3115.scala9
-rw-r--r--test/files/neg/t3118.check7
-rw-r--r--test/files/neg/t3118.scala8
-rw-r--r--test/files/neg/t3222.check13
-rw-r--r--test/files/neg/t3222.scala9
-rw-r--r--test/files/neg/unit2anyref.check6
-rw-r--r--test/files/pos/annotations.scala24
-rw-r--r--test/files/pos/bug0646.scala2
-rw-r--r--test/files/pos/bug2310.scala38
-rw-r--r--test/files/pos/bug3097.flags1
-rw-r--r--test/files/pos/bug3097.scala31
-rw-r--r--test/files/pos/bug3136.scala19
-rw-r--r--test/files/pos/bug3175.scala7
-rw-r--r--test/files/pos/bug432.scala2
-rw-r--r--test/files/pos/bug715.cmds2
-rw-r--r--test/files/pos/manifest1.scala7
-rw-r--r--test/files/pos/scan.scala23
-rw-r--r--test/files/pos/spec-List.scala18
-rw-r--r--test/files/pos/spec-arrays.scala6
-rw-r--r--test/files/pos/spec-partially.flags1
-rw-r--r--test/files/pos/spec-partially.scala5
-rw-r--r--test/files/pos/super.cmds2
-rw-r--r--test/files/pos/switchUnbox.flags2
-rw-r--r--test/files/pos/t0816.scala (renamed from test/pending/pos/t0816.scala)2
-rw-r--r--test/files/pos/t0971.java4
-rw-r--r--test/files/pos/t0999.scala5
-rw-r--r--test/files/pos/t1029.cmds2
-rw-r--r--test/files/pos/t1035.scala (renamed from test/pending/pos/t1035.scala)0
-rw-r--r--test/files/pos/t1164.scala2
-rw-r--r--test/files/pos/t1380.flags1
-rw-r--r--test/files/pos/t1751.cmds3
-rwxr-xr-xtest/files/pos/t1756.scala2
-rw-r--r--test/files/pos/t1782.cmds2
-rw-r--r--test/files/pos/t1836/J.java (renamed from test/pending/pos/t1836/J.java)0
-rw-r--r--test/files/pos/t1836/S.scala (renamed from test/pending/pos/t1836/S.scala)0
-rw-r--r--test/files/pos/t1942.cmds2
-rw-r--r--test/files/pos/t1996.scala (renamed from test/pending/pos/t1996.scala)0
-rw-r--r--test/files/pos/t2421c.scala17
-rwxr-xr-xtest/files/pos/t2433/A.java4
-rwxr-xr-xtest/files/pos/t2433/B.java4
-rwxr-xr-xtest/files/pos/t2433/Test.scala3
-rw-r--r--test/files/pos/t2464.cmds3
-rw-r--r--test/files/pos/t2610.scala (renamed from test/pending/pos/t2610.scala)0
-rw-r--r--test/files/pos/t2660.scala (renamed from test/pending/pos/t2660.scala)0
-rw-r--r--test/files/pos/t2691.scala (renamed from test/pending/pos/t2691.scala)0
-rw-r--r--test/files/pos/t2726.cmds2
-rw-r--r--test/files/pos/t2741/2741-1.scala13
-rw-r--r--test/files/pos/t2741/2741-2.scala5
-rw-r--r--test/files/pos/t2795.scala1
-rw-r--r--test/files/pos/t2797.scala9
-rw-r--r--test/files/pos/t2867.scala1
-rw-r--r--test/files/pos/t2868.cmds3
-rw-r--r--test/files/pos/t2868/Jann.java5
-rw-r--r--test/files/pos/t2868/Nest.java3
-rw-r--r--test/files/pos/t2868/pick_1.scala7
-rw-r--r--test/files/pos/t2868/test_2.scala6
-rwxr-xr-xtest/files/pos/t2913.scala53
-rw-r--r--test/files/pos/t294.cmds3
-rw-r--r--test/files/pos/t2940/Cycle.java3
-rw-r--r--test/files/pos/t2940/Error.scala12
-rw-r--r--test/files/pos/t2956/BeanDefinitionVisitor.java6
-rwxr-xr-xtest/files/pos/t2956/t2956.scala7
-rw-r--r--test/files/pos/t2994a.scala27
-rw-r--r--test/files/pos/t2994b.scala7
-rw-r--r--test/files/pos/t3037.scala13
-rw-r--r--test/files/pos/t3071.scala7
-rw-r--r--test/files/pos/t3076/C2.scala4
-rw-r--r--test/files/pos/t3076/T.scala2
-rw-r--r--test/files/pos/t3079.scala17
-rw-r--r--test/files/pos/t3152.scala20
-rw-r--r--test/files/pos/t425.scala (renamed from test/pending/pos/t425.scala)0
-rw-r--r--test/files/positions/Unsupported2.scala5
-rw-r--r--test/files/res/bug687.check5
-rw-r--r--test/files/run/Course-2002-13.scala4
-rw-r--r--test/files/run/ReplacementMatching.scala47
-rw-r--r--test/files/run/arraycopy.scala31
-rw-r--r--test/files/run/arybufgrow.scala4
-rw-r--r--test/files/run/bigDecimalCache.scala9
-rw-r--r--test/files/run/bug1074.check2
-rw-r--r--test/files/run/bug3126.scala9
-rw-r--r--test/files/run/bug3175.check11
-rw-r--r--test/files/run/bug3175.scala55
-rw-r--r--test/files/run/bug594.scala2
-rw-r--r--test/files/run/bug751.scala6
-rw-r--r--test/files/run/bytecodecs.scala39
-rw-r--r--test/files/run/caseClassEquality.scala36
-rw-r--r--test/files/run/colltest1.check32
-rw-r--r--test/files/run/colltest1.scala8
-rw-r--r--test/files/run/constrained-types.check2
-rw-r--r--test/files/run/elidable.check1
-rw-r--r--test/files/run/elidable.flags1
-rw-r--r--test/files/run/elidable.scala16
-rw-r--r--test/files/run/equality.scala2
-rw-r--r--test/files/run/hashCodeBoxesRunTime.scala2
-rw-r--r--test/files/run/hashCodeDistribution.scala2
-rw-r--r--test/files/run/hashhash.scala15
-rw-r--r--test/files/run/iterators.check2
-rw-r--r--test/files/run/iterators.scala10
-rw-r--r--test/files/run/lists.scala4
-rw-r--r--test/files/run/names-defaults.check2
-rw-r--r--test/files/run/names-defaults.scala15
-rw-r--r--test/files/run/programmatic-main.check26
-rw-r--r--test/files/run/programmatic-main.scala12
-rw-r--r--test/files/run/range.scala3
-rw-r--r--test/files/run/regularpatmat.check126
-rw-r--r--test/files/run/spec-absfun.flags1
-rw-r--r--test/files/run/spec-absfun.scala43
-rw-r--r--test/files/run/spec-matrix.check1
-rw-r--r--test/files/run/spec-matrix.flags1
-rw-r--r--test/files/run/spec-matrix.scala70
-rw-r--r--test/files/run/spec-patmatch.check19
-rw-r--r--test/files/run/spec-patmatch.flags1
-rw-r--r--test/files/run/spec-patmatch.scala52
-rw-r--r--test/files/run/t0017.check2
-rw-r--r--test/files/run/t0528.scala2
-rw-r--r--test/files/run/t1500.scala6
-rw-r--r--test/files/run/t1501.scala8
-rw-r--r--test/files/run/t1773.scala2
-rw-r--r--test/files/run/t2074.scala3
-rw-r--r--test/files/run/t2074_2.check6
-rw-r--r--test/files/run/t2074_2.scala7
-rw-r--r--test/files/run/t2212.scala10
-rw-r--r--test/files/run/t2417.check12
-rw-r--r--test/files/run/t2417.scala77
-rw-r--r--test/files/run/t2526.scala5
-rw-r--r--test/files/run/t2867.scala15
-rw-r--r--test/files/run/t2886.check1
-rw-r--r--test/files/run/t2886.scala7
-rw-r--r--test/files/run/t2946/Parsers.scala4
-rw-r--r--test/files/run/t2946/ResponseCommon.scala14
-rw-r--r--test/files/run/t2946/Test.scala7
-rw-r--r--test/files/run/t3026.check2
-rwxr-xr-xtest/files/run/t3026.scala8
-rw-r--r--test/files/run/t3112.check4
-rw-r--r--test/files/run/t3112.scala11
-rw-r--r--test/files/run/t3158.check1
-rw-r--r--test/files/run/t3158.scala9
-rw-r--r--test/files/run/t3186.check (renamed from test/pending/jvm/t1464.check)0
-rw-r--r--test/files/run/t3186.scala7
-rw-r--r--test/files/run/t3242.check18
-rw-r--r--test/files/run/t3242.scala49
-rw-r--r--test/files/run/t3242b.scala17
-rw-r--r--test/files/run/treePrint.check5
-rw-r--r--test/files/run/treePrint.scala40
-rw-r--r--test/files/run/unittest_collection.scala2
-rw-r--r--test/files/run/unittest_iterator.scala13
-rw-r--r--test/files/run/vector1.scala10
-rw-r--r--test/files/run/viewtest.check12
-rwxr-xr-xtest/files/run/viewtest.scala2
-rw-r--r--test/files/run/withIndex.scala2
-rw-r--r--test/files/scalacheck/array.scala46
-rw-r--r--test/files/scalacheck/eqeq.scala37
-rw-r--r--test/files/scalacheck/list.scala4
-rw-r--r--test/files/scalacheck/range.scala205
-rw-r--r--test/files/scalacheck/scan.scala17
-rw-r--r--test/files/scalap/caseClass/result.test7
-rw-r--r--test/files/scalap/classPrivate/A.scala9
-rw-r--r--test/files/scalap/classPrivate/result.test10
-rw-r--r--test/files/scalap/classWithExistential/result.test4
-rw-r--r--test/files/scalap/covariantParam/result.test4
-rw-r--r--test/files/scalap/defaultParameter/A.scala3
-rw-r--r--test/files/scalap/defaultParameter/result.test3
-rw-r--r--test/files/scalap/typeAnnotations/A.scala9
-rw-r--r--test/files/scalap/typeAnnotations/result.test8
-rwxr-xr-x[-rw-r--r--]test/files/script/fact.scala0
-rwxr-xr-xtest/partest22
-rwxr-xr-xtest/partest.bat6
-rw-r--r--test/pending/buildmanager/t2443/BitSet.scala2
-rw-r--r--test/pending/buildmanager/t2443/t2443.changes/BitSet2.scala1
-rw-r--r--test/pending/buildmanager/t2443/t2443.check6
-rw-r--r--test/pending/buildmanager/t2443/t2443.test3
-rw-r--r--test/pending/continuations-run/example0.scala9
-rw-r--r--test/pending/continuations-run/example1.scala9
-rw-r--r--test/pending/continuations-run/example16.scala9
-rw-r--r--test/pending/continuations-run/example2.scala9
-rw-r--r--test/pending/continuations-run/example3.scala9
-rw-r--r--test/pending/continuations-run/example4.scala9
-rw-r--r--test/pending/continuations-run/example5.scala9
-rw-r--r--test/pending/continuations-run/example6.scala9
-rw-r--r--test/pending/continuations-run/example7.scala9
-rw-r--r--test/pending/continuations-run/example8.scala9
-rw-r--r--test/pending/continuations-run/example9.scala9
-rw-r--r--test/pending/continuations-run/foreach.check4
-rw-r--r--test/pending/continuations-run/foreach.scala33
-rw-r--r--test/pending/jvm/actor-executor4.check21
-rw-r--r--test/pending/jvm/actor-executor4.scala64
-rw-r--r--test/pending/neg/bug1210.check (renamed from test/files/neg/bug1210.check)0
-rw-r--r--test/pending/pos/t0644.scala11
-rw-r--r--test/pending/pos/t1380/gnujaxp.jar.desired.sha1 (renamed from test/files/pos/t1380/gnujaxp.jar.desired.sha1)0
-rw-r--r--test/pending/pos/t1380/hallo.scala (renamed from test/files/pos/t1380/hallo.scala)0
-rw-r--r--test/pending/pos/t1659.scala4
-rw-r--r--test/pending/pos/t2060.scala28
-rw-r--r--test/pending/run/bug1697.scala (renamed from test/files/run/bug1697.scala)0
-rwxr-xr-xtest/pending/run/bug2365/run13
-rw-r--r--test/pending/run/bugs425-and-816.scala27
-rw-r--r--test/pending/run/instanceOfAndTypeMatching.scala193
-rw-r--r--test/pending/run/subarray.check2
-rw-r--r--test/pending/script/bug2365.javaopts (renamed from test/pending/run/bug2365/bug2365.javaopts)0
-rw-r--r--test/pending/script/bug2365/Test.scala (renamed from test/pending/run/bug2365/Test.scala)6
-rwxr-xr-xtest/pending/script/bug2365/bug2365.scala9
-rw-r--r--test/support/annotations/NestedAnnotations.java (renamed from test/files/jvm/NestedAnnotations.java)0
-rw-r--r--test/support/annotations/OuterEnum.java (renamed from test/files/jvm/OuterEnum.java)0
-rw-r--r--test/support/annotations/OuterTParams.java (renamed from test/files/jvm/OuterTParams.java)0
-rw-r--r--test/support/annotations/SourceAnnotation.java (renamed from test/files/jvm/SourceAnnotation.java)0
-rwxr-xr-xtest/support/annotations/mkAnnotationsJar.sh (renamed from test/files/jvm/mkAnnotationsJar.sh)0
-rwxr-xr-xtools/abspath9
-rwxr-xr-xtools/cpof30
-rwxr-xr-xtools/diffPickled51
-rwxr-xr-xtools/git-get-rev5
-rwxr-xr-xtools/packcp5
-rwxr-xr-xtools/pathResolver11
-rwxr-xr-xtools/quickcp8
-rwxr-xr-xtools/scalawhich4
-rwxr-xr-xtools/showPickled32
-rwxr-xr-xtools/starrcp5
-rwxr-xr-xtools/strapcp8
-rwxr-xr-xtools/truncate (renamed from truncate)0
1245 files changed, 29458 insertions, 18147 deletions
diff --git a/META-INF/MANIFEST.MF b/META-INF/MANIFEST.MF
index 2ef810c2f2..3ab0a57c5c 100644
--- a/META-INF/MANIFEST.MF
+++ b/META-INF/MANIFEST.MF
@@ -28,11 +28,13 @@ Export-Package:
scala.tools.nsc.matching,
scala.tools.nsc.plugins,
scala.tools.nsc.reporters,
+ scala.tools.nsc.settings,
scala.tools.nsc.symtab,
scala.tools.nsc.symtab.classfile,
scala.tools.nsc.transform,
scala.tools.nsc.typechecker,
scala.tools.nsc.util,
+ scala.tools.util,
ch.epfl.lamp.compiler.msil,
ch.epfl.lamp.compiler.msil.emit,
ch.epfl.lamp.compiler.msil.util,
diff --git a/README b/README
index 5bf8d51e0c..9a39a8a8fe 100644
--- a/README
+++ b/README
@@ -12,9 +12,10 @@ Part I. The repository layout
Follows the file layout of the Scala repository. Files marked with a † are not
part of the Subversion repository but are either automatically generated by the
-build script or user-created if needed.
+build script or user-created if needed. This is not a complete listing.
scala/
+ bin/ Developer utilities.
build/ † Temporary staging area for build products.
build.excludes † An optional build configuration file.
build.number The version number of the current distribution.
@@ -23,24 +24,24 @@ scala/
dist/ † The destination folder of Scala distributions.
docs/ Documentation of Scala. More in its own module.
development/ Developer documentation.
- examples/ Scala example files.
- man/ UNIX manual files.
+ examples/ Scala source code examples.
lib/ Pre-compiled libraries for the build.
fjbg.jar The Java byte-code generation library.
- scala-compiler.jar The last stable version of the Scala compiler.
- scala-library.jar The last stable version of the Scala library.
+    scala-compiler.jar       The stable reference version (aka 'starr') of the Scala compiler.
+ scala-library.jar The stable reference version (aka 'starr') of the Scala library.
+ scala-library-src.jar A snapshot of the source code which was used to build starr.
ant/ Support libraries for the build tool.
ant-contrib.jar Provides additional features for Ant
vizant.jar Provides DOT graph generation for Ant
README The file you are currently reading.
sandbox/ † A folder to test code etc.
src/ All the source files of Scala.
+ actors/ The sources of the Actor library.
compiler/ The sources of the Scala compiler.
- library/ The sources of the Scala library.
+ library/ The sources of the core Scala library.
+ swing/ The sources of the Swing library.
test/ The Scala test suite.
-Any change to this structure requires a modification of the 'build.xml' file.
-
Part IV. Building Scala with SABBUS
--------------------------------------------------------------------------------
@@ -50,7 +51,7 @@ LAYERS:
In order to guarantee the bootstrapping of the Scala compiler, SABBUS builds Scala in layers. Each layer is a complete compiled Scala compiler and library. A superior layer is always compiled by the layer just below it. Here is a short description of the four layers that SABBUS uses, from bottom to top:
-'starr': the stable reference Scala release which is shared by all the developers. It is found in the repository as 'lib/scala.compiler.jar' and 'lib/scala-library.jar'. Any committable source code must be compiled directly by starr to guarantee the bootstrapping of the compiler.
+'starr': the stable reference Scala release which is shared by all the developers. It is found in the repository as 'lib/scala-compiler.jar' and 'lib/scala-library.jar'. Any committable source code must be compiled directly by starr to guarantee the bootstrapping of the compiler.
'locker': the local reference which is compiled by starr and is the work compiler in a typical development cycle. When it has been built once, it is "frozen" in this state. Updating it to fit the current source code must be explicitly required (see below).
@@ -60,7 +61,7 @@ In order to guarantee the bootstrapping of the Scala compiler, SABBUS builds Sca
DEPENDANT CHANGES:
-SABBUS compiles, for each layer, the Scala library first and the compiler next. That means that any changes in the library can immediately be used in the compiler without an intermediate build. On the other hand, if building the library requires changes in the compiler a new starr (or in some case only locker if bootstrapping is still possible) compiler must be built in-between.
+SABBUS compiles, for each layer, the Scala library first and the compiler next. That means that any changes in the library can immediately be used in the compiler without an intermediate build. On the other hand, if building the library requires changes in the compiler, a new locker must be built if bootstrapping is still possible, or a new starr if it is not.
Part III. Requirements for SABBUS
--------------------------------------------------------------------------------
@@ -73,30 +74,34 @@ Part IV. Common use-cases
--------------------------------------------------------------------------------
'ant -p'
- Prints-out information about all available targets in the build script.
+ Prints out information about the commonly used ant targets. The interested developer can find the rest in the xml files.
'ant' or 'ant build'
A quick compilation (to quick) of your changes using the locker compiler.
- This will rebuild all quick if locker changed.
- This will also rebuild locker if starr changed.
-'ln -s build/quick/bin bin' (once)
-'ant && bin/scalac -d sandbox sandbox/test.scala && bin/scala -cp sandbox Test'
+'ln -s build/quick/bin qbin' (once)
+'ant && qbin/scalac -d sandbox sandbox/test.scala && qbin/scala -cp sandbox Test'
Incrementally builds quick, and then uses it to compile and run the file
'sandbox/test.scala'. This is a typical debug cycle.
-'ant newlocker'
+'ant replacelocker'
"unfreezes" locker by updating it to match the current source code.
- This will delete quick so as not to mix classes compiled with different
versions of locker.
'ant test'
Tests that your code is working and fit to be committed.
- - Runs the test suite on quick.
+ - Runs the test suite and bootstrapping test on quick.
+ - You can run the suite only (skipping strap) with 'ant test.suite'.
'ant docs'
Generates the HTML documentation for the library from the sources using the
- scaladoc tool in quick.
+ scaladoc tool in quick. Note: on most machines this requires more heap than
+    is allocated by default. You can adjust the parameters with ANT_OPTS.
+ Example command line:
+ ANT_OPTS="-Xms512M -Xmx2048M -Xss1M -XX:MaxPermSize=128M" ant docs
'ant dist'
Builds a distribution.
@@ -115,7 +120,9 @@ Part IV. Common use-cases
'ant all.clean'
Removes all build files (including locker) and all distributions.
-Additional ant targets can be seen via 'ant -p'.
+Many of these targets offer a variant which runs with -optimise enabled.
+Optimized targets include build-opt, test-opt, dist-opt, fastdist-opt,
+replacestarr-opt, replacelocker-opt, and distpack-opt.
Part V. Contributing to Scala
--------------------------------------------------------------------------------
@@ -124,7 +131,7 @@ If you wish to contribute, you can find all of the necessary information on
the official Scala website: www.scala-lang.org.
Specifically, you can subscribe to the Scala mailing lists, read all of the
-available documentation, and browse the live SVN repository. You can contact
+available documentation, and browse the live SVN repository. You can contact
the Scala team by sending us a message on one of the mailing lists, or by using
the available contact form.
diff --git a/build.xml b/build.xml
index 32acdbfac8..efaa1941da 100644
--- a/build.xml
+++ b/build.xml
@@ -125,10 +125,15 @@ END-USER TARGETS
</target>
<target name="newlibs"
- description="Requires libraries (MSIL, FJBG, FORKJOIN) to be rebuilt. Add this target before any other if class file format is incompatible.">
+ description="Requires libraries (MSIL, FJBG) to be rebuilt. Add this target before any other if class file format is incompatible.">
<property name="libs.outdated" value="yes"/>
</target>
+ <target name="newforkjoin"
+ description="Requires forkjoin library to be rebuilt. Add this target before any other if class file format is incompatible.">
+ <property name="forkjoin.outdated" value="yes"/>
+ </target>
+
<!-- ===========================================================================
PROPERTIES
============================================================================ -->
@@ -142,7 +147,9 @@ PROPERTIES
<property name="lib.dir" value="${basedir}/lib"/>
<property name="lib-ant.dir" value="${lib.dir}/ant"/>
<property name="src.dir" value="${basedir}/src"/>
- <property name="test.dir" value="${basedir}/test"/>
+ <property name="partest.rootdir" location="test" />
+ <property name="partest.srcdir.default" value="files" />
+ <property name="partest.options" value="" />
<!-- Loads custom properties definitions -->
<property file="${basedir}/build.properties"/>
@@ -156,6 +163,7 @@ PROPERTIES
<property name="comp.starr.jar" value="${lib.dir}/scala-compiler.jar"/>
<property name="jline.jar" value="${lib.dir}/jline.jar"/>
<property name="ant.jar" value="${ant.home}/lib/ant.jar"/>
+ <property name="scalacheck.jar" value="${lib.dir}/scalacheck.jar"/>
<!-- Sets location of build folders -->
<property name="build.dir" value="${basedir}/build"/>
@@ -170,7 +178,7 @@ PROPERTIES
<property name="dists.dir" value="${basedir}/dists"/>
<property name="copyright.string" value="Copyright 2002-2010, LAMP/EPFL"/>
- <property name="partest.version.number" value="0.9.2"/>
+ <property name="partest.version.number" value="0.9.3"/>
<!-- These are NOT the flags used to run SuperSabbus, but the ones written
into the script runners created with scala.tools.ant.ScalaTool -->
@@ -178,16 +186,11 @@ PROPERTIES
<!-- if ANT_OPTS is already set by the environment, it will be unaltered,
but if it is unset it will take this default value. -->
- <property name="env.ANT_OPTS" value="-Xms512M -Xmx1024M -Xss1M -XX:MaxPermSize=128M" />
+ <property name="env.ANT_OPTS" value="-Xms128M -Xmx1024M -Xss1M -XX:MaxPermSize=128M" />
<!-- to find max heap usage: -Xaprof ; currently at 980M for locker.comp -->
- <echo message="Using ANT_OPTS: ${env.ANT_OPTS}" />
- <property
- name="scalacfork.jvmargs"
- value="${env.ANT_OPTS}"/>
-
- <property name="javac.cmd" value="${env.JAVA_HOME}/bin/javac"/>
- <property name="java.cmd" value="${env.JAVA_HOME}/bin/java"/>
+ <property name="scalacfork.jvmargs" value="${env.ANT_OPTS}" />
+ <echo message="Using scalacfork.jvmargs: ${scalacfork.jvmargs}" />
<!-- ===========================================================================
INITIALISATION
@@ -213,16 +216,34 @@ INITIALISATION
<condition property="os.win">
<os family="windows"/>
</condition>
- <!-- Finding out SVN revision -->
+ <!-- Finding out SVN revision, svn style -->
<exec executable="svn" outputproperty="svn.out"
failifexecutionfails="false">
<arg line=" info ${basedir}"/>
</exec>
<propertyregex
- property="svn.number" input="${svn.out}" select="\1"
+ property="svn.number.svn" input="${svn.out}" select="\1"
regexp="Revision: ([0-9]+)"
defaultValue="0"/>
+
+ <!-- Both clauses of the conditional set svn.number -->
+ <if>
+ <equals arg1="${svn.number.svn}" arg2="0" />
+ <then>
+ <!-- Finding SVN revision, git style -->
+ <exec osfamily="unix" executable="tools/git-get-rev" outputproperty="svn.number.git" failifexecutionfails="false" />
+ <propertyregex
+ property="svn.number" input="${svn.number.git}" select="\1"
+ regexp="\D*?(\d+)"
+ defaultValue="0"/>
+ </then>
+ <else>
+ <property name="svn.number" value="${svn.number.svn}" />
+ </else>
+ </if>
+
<property name="init.avail" value="yes"/>
+
<!-- Generating version number -->
<property file="${basedir}/build.number"/>
<property
@@ -234,10 +255,10 @@ INITIALISATION
<!-- Define tasks that can be run with Starr -->
<path id="starr.classpath">
<pathelement location="${lib.starr.jar}"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
<pathelement location="${comp.starr.jar}"/>
<pathelement location="${lib.dir}/fjbg.jar"/>
<pathelement location="${lib.dir}/msil.jar"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
<pathelement location="${ant.jar}"/>
</path>
<taskdef resource="scala/tools/ant/sabbus/antlib.xml" classpathref="starr.classpath"/>
@@ -319,12 +340,12 @@ LOCAL REFERENCE BUILD (LOCKER)
<include name="**/*.scala"/>
<compilationpath>
<pathelement location="${build-locker.dir}/classes/library"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
<pathelement location="${build-locker.dir}/classes/compiler"/>
<pathelement location="${lib.dir}/fjbg.jar"/>
<pathelement location="${lib.dir}/msil.jar"/>
- <pathelement location="${jline.jar}"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
<pathelement location="${ant.jar}"/>
+ <pathelement location="${jline.jar}"/>
</compilationpath>
</scalacfork>
<propertyfile file="${build-locker.dir}/classes/compiler/compiler.properties">
@@ -346,15 +367,15 @@ LOCAL REFERENCE BUILD (LOCKER)
<touch file="${build-locker.dir}/compiler.complete" verbose="no"/>
<stopwatch name="locker.comp.timer" action="total"/>
</target>
-
+
<target name="locker.done" depends="locker.comp">
<touch file="${build-locker.dir}/all.complete" verbose="no"/>
<path id="locker.classpath">
<pathelement location="${build-locker.dir}/classes/library"/>
- <pathelement location="${lib.dir}/forkjoin.jar"/>
<pathelement location="${build-locker.dir}/classes/compiler"/>
<pathelement location="${lib.dir}/fjbg.jar"/>
<pathelement location="${lib.dir}/msil.jar"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
<pathelement location="${ant.jar}"/>
</path>
</target>
@@ -423,6 +444,7 @@ QUICK BUILD (QUICK)
<include name="library/**"/>
<include name="dbc/**"/>
<include name="actors/**"/>
+ <include name="continuations/**"/>
<include name="swing/**"/>
</srcfiles>
</uptodate>
@@ -510,20 +532,26 @@ QUICK BUILD (QUICK)
</target>
<target name="quick.newlibs" depends="quick.lib" if="libs.outdated">
- <antcall target="libs.done">
- <param name="forkjoin.jar" value="${build-libs.dir}/forkjoin.jar"/>
- <param name="fjbg.jar" value="${build-libs.dir}/fjbg.jar"/>
- <param name="msil.jar" value="${build-libs.dir}/msil.jar"/>
- </antcall>
+ <antcall target="libs.done" inheritRefs="true"/>
+ <property name="fjbg.jar" value="${build-libs.dir}/fjbg.jar"/>
+ <property name="msil.jar" value="${build-libs.dir}/msil.jar"/>
</target>
<target name="quick.libs" depends="quick.newlibs" unless="libs.outdated">
- <property name="forkjoin.jar" value="${lib.dir}/forkjoin.jar"/>
<property name="fjbg.jar" value="${lib.dir}/fjbg.jar"/>
<property name="msil.jar" value="${lib.dir}/msil.jar"/>
</target>
-
- <target name="quick.pre-comp" depends="quick.libs">
+
+ <target name="quick.newforkjoin" depends="quick.libs" if="forkjoin.outdated">
+ <antcall target="forkjoin.done" inheritRefs="true"/>
+ <property name="forkjoin.jar" value="${build-libs.dir}/forkjoin.jar"/>
+ </target>
+
+ <target name="quick.forkjoin" depends="quick.newforkjoin" unless="forkjoin.outdated">
+ <property name="forkjoin.jar" value="${lib.dir}/forkjoin.jar"/>
+ </target>
+
+ <target name="quick.pre-comp" depends="quick.forkjoin">
<uptodate property="quick.comp.available" targetfile="${build-quick.dir}/compiler.complete">
<srcfiles dir="${src.dir}/compiler"/>
</uptodate>
@@ -541,12 +569,12 @@ QUICK BUILD (QUICK)
<include name="**/*.scala"/>
<compilationpath>
<pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${forkjoin.jar}"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
<pathelement location="${fjbg.jar}"/>
<pathelement location="${msil.jar}"/>
- <pathelement location="${jline.jar}"/>
+ <pathelement location="${forkjoin.jar}"/>
<pathelement location="${ant.jar}"/>
+ <pathelement location="${jline.jar}"/>
</compilationpath>
</scalacfork>
<propertyfile file="${build-quick.dir}/classes/compiler/compiler.properties">
@@ -568,7 +596,53 @@ QUICK BUILD (QUICK)
<stopwatch name="quick.comp.timer" action="total"/>
</target>
- <target name="quick.pre-scalap" depends="quick.comp">
+ <target name="quick.pre-plugins" depends="quick.comp">
+ <uptodate property="quick.plugins.available" targetfile="${build-quick.dir}/plugins.complete">
+ <srcfiles dir="${src.dir}/continuations"/>
+ </uptodate>
+ </target>
+
+ <target name="quick.plugins" depends="quick.pre-plugins" unless="quick.plugins.available">
+ <stopwatch name="quick.plugins.timer"/>
+ <mkdir dir="${build-quick.dir}/classes/continuations-plugin"/>
+ <scalacfork
+ destdir="${build-quick.dir}/classes/continuations-plugin"
+ compilerpathref="locker.classpath"
+ params="${scalac.args.quick}"
+ srcdir="${src.dir}/continuations/plugin"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath>
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ <pathelement location="${build-quick.dir}/classes/compiler"/>
+ </compilationpath>
+ </scalacfork>
+ <copy
+ file="${src.dir}/continuations/plugin/scalac-plugin.xml"
+ todir="${build-quick.dir}/classes/continuations-plugin"/>
+ <!-- not very nice to create jar here but needed to load plugin -->
+ <mkdir dir="${build-quick.dir}/plugins"/>
+ <jar destfile="${build-quick.dir}/plugins/continuations.jar">
+ <fileset dir="${build-quick.dir}/classes/continuations-plugin"/>
+ </jar>
+ <!-- might split off library part into its own ant target -->
+ <scalacfork
+ destdir="${build-quick.dir}/classes/library"
+ compilerpathref="locker.classpath"
+ params="${scalac.args.quick} -Xpluginsdir ${build-quick.dir}/plugins -Xplugin-require:continuations -P:continuations:enable"
+ srcdir="${src.dir}/continuations/library"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath>
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
+ </compilationpath>
+ </scalacfork>
+ <touch file="${build-quick.dir}/plugins.complete" verbose="no"/>
+ <stopwatch name="quick.plugins.timer" action="total"/>
+ </target>
+
+ <target name="quick.pre-scalap" depends="quick.plugins">
<uptodate property="quick.scalap.available" targetfile="${build-quick.dir}/scalap.complete">
<srcfiles dir="${src.dir}/scalap"/>
</uptodate>
@@ -587,8 +661,8 @@ QUICK BUILD (QUICK)
<compilationpath>
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
- <pathelement location="${build-quick.dir}/classes/partest"/>
<pathelement location="${build-quick.dir}/classes/scalap"/>
+ <pathelement location="${build-quick.dir}/classes/partest"/>
<pathelement location="${ant.jar}"/>
</compilationpath>
</scalacfork>
@@ -631,6 +705,7 @@ QUICK BUILD (QUICK)
<pathelement location="${build-quick.dir}/classes/scalap"/>
<pathelement location="${build-quick.dir}/classes/partest"/>
<pathelement location="${ant.jar}"/>
+ <pathelement location="${scalacheck.jar}"/>
</compilationpath>
</scalacfork>
<propertyfile file="${build-quick.dir}/classes/partest/partest.properties">
@@ -655,11 +730,11 @@ QUICK BUILD (QUICK)
<target name="quick.bin" depends="quick.pre-bin" unless="quick.bin.available">
<path id="quick.bin.classpath">
<pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${forkjoin.jar}"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
<pathelement location="${build-quick.dir}/classes/scalap"/>
<pathelement location="${fjbg.jar}"/>
<pathelement location="${msil.jar}"/>
+ <pathelement location="${forkjoin.jar}"/>
<pathelement location="${jline.jar}"/>
</path>
<taskdef name="quick-bin" classname="scala.tools.ant.ScalaTool" classpathref="quick.bin.classpath"/>
@@ -700,10 +775,10 @@ QUICK BUILD (QUICK)
<target name="quick.done" depends="quick.bin">
<path id="quick.classpath">
<pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${forkjoin.jar}"/>
<pathelement location="${build-quick.dir}/classes/compiler"/>
<pathelement location="${fjbg.jar}"/>
<pathelement location="${msil.jar}"/>
+ <pathelement location="${forkjoin.jar}"/>
<pathelement location="${ant.jar}"/>
</path>
</target>
@@ -772,7 +847,21 @@ PACKED QUICK BUILD (PACK)
<copy file="${jline.jar}" toDir="${build-pack.dir}/lib"/>
</target>
- <target name="pack.pre-partest" depends="pack.comp">
+ <target name="pack.pre-plugins" depends="pack.comp">
+ <uptodate
+ property="pack.plugins.available"
+ targetfile="${build-pack.dir}/plugins/continuations.jar"
+ srcfile="${build-quick.dir}/plugins.complete"/>
+ </target>
+
+ <target name="pack.plugins" depends="pack.pre-plugins" unless="pack.plugins.available">
+ <mkdir dir="${build-pack.dir}/plugins"/>
+ <jar destfile="${build-pack.dir}/plugins/continuations.jar">
+ <fileset dir="${build-quick.dir}/classes/continuations-plugin"/>
+ </jar>
+ </target>
+
+ <target name="pack.pre-partest" depends="pack.plugins">
<uptodate
property="pack.partest.available"
targetfile="${build-pack.dir}/lib/scala-partest.jar"
@@ -842,6 +931,15 @@ PACKED QUICK BUILD (PACK)
<chmod perm="ugo+rx" file="${build-pack.dir}/bin/scaladoc"/>
<chmod perm="ugo+rx" file="${build-pack.dir}/bin/fsc"/>
<chmod perm="ugo+rx" file="${build-pack.dir}/bin/scalap"/>
+
+ <mkdir dir="${build-pack.dir}/etc"/>
+ <exec osfamily="unix" executable="${build-pack.dir}/bin/scala" output="${build-pack.dir}/etc/scala_completion.sh" failifexecutionfails="false" >
+ <arg line="scala.tools.util.BashCompletion" />
+ </exec>
+ <!-- <exec append="true" osfamily="unix" executable="${build-pack.dir}/bin/scala" output="${build-pack.dir}/etc/scala_completion.sh" failifexecutionfails="false" >
+ <arg line="scala.tools.partest.PartestSpecDryRun" />
+ </exec> -->
+
<touch file="${build-pack.dir}/bin.complete" verbose="no"/>
</target>
@@ -978,12 +1076,12 @@ BOOTSTRAPPING BUILD (STRAP)
<include name="**/*.scala"/>
<compilationpath>
<pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${forkjoin.jar}"/>
<pathelement location="${build-strap.dir}/classes/compiler"/>
<pathelement location="${fjbg.jar}"/>
<pathelement location="${msil.jar}"/>
- <pathelement location="${jline.jar}"/>
+ <pathelement location="${forkjoin.jar}"/>
<pathelement location="${ant.jar}"/>
+ <pathelement location="${jline.jar}"/>
</compilationpath>
</scalacfork>
<propertyfile file="${build-strap.dir}/classes/compiler/compiler.properties">
@@ -1004,8 +1102,54 @@ BOOTSTRAPPING BUILD (STRAP)
<touch file="${build-strap.dir}/compiler.complete" verbose="no"/>
<stopwatch name="strap.comp.timer" action="total"/>
</target>
-
- <target name="strap.pre-scalap" depends="strap.comp">
+
+ <target name="strap.pre-plugins" depends="strap.comp">
+ <uptodate property="strap.plugins.available" targetfile="${build-strap.dir}/plugins.complete">
+ <srcfiles dir="${src.dir}/continuations"/>
+ </uptodate>
+ </target>
+
+ <target name="strap.plugins" depends="strap.pre-plugins" unless="strap.plugins.available">
+ <stopwatch name="strap.plugins.timer"/>
+ <mkdir dir="${build-strap.dir}/classes/continuations-plugin"/>
+ <scalacfork
+ destdir="${build-strap.dir}/classes/continuations-plugin"
+ compilerpathref="pack.classpath"
+ params="${scalac.args.quick}"
+ srcdir="${src.dir}/continuations/plugin"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath>
+ <pathelement location="${build-strap.dir}/classes/library"/>
+ <pathelement location="${build-strap.dir}/classes/compiler"/>
+ </compilationpath>
+ </scalacfork>
+ <copy
+ file="${src.dir}/continuations/plugin/scalac-plugin.xml"
+ todir="${build-strap.dir}/classes/continuations-plugin"/>
+ <!-- not very nice to create jar here but needed to load plugin -->
+ <mkdir dir="${build-strap.dir}/plugins"/>
+ <jar destfile="${build-strap.dir}/plugins/continuations.jar">
+ <fileset dir="${build-strap.dir}/classes/continuations-plugin"/>
+ </jar>
+ <!-- might split off library part into its own ant target -->
+ <scalacfork
+ destdir="${build-strap.dir}/classes/library"
+ compilerpathref="pack.classpath"
+ params="${scalac.args.quick} -Xpluginsdir ${build-quick.dir}/plugins -Xplugin-require:continuations -P:continuations:enable"
+ srcdir="${src.dir}/continuations/library"
+ jvmargs="${scalacfork.jvmargs}">
+ <include name="**/*.scala"/>
+ <compilationpath>
+ <pathelement location="${build-strap.dir}/classes/library"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
+ </compilationpath>
+ </scalacfork>
+ <touch file="${build-strap.dir}/plugins.complete" verbose="no"/>
+ <stopwatch name="strap.plugins.timer" action="total"/>
+ </target>
+
+ <target name="strap.pre-scalap" depends="strap.plugins">
<uptodate property="strap.scalap.available" targetfile="${build-strap.dir}/scalap.complete">
<srcfiles dir="${src.dir}/scalap"/>
</uptodate>
@@ -1024,8 +1168,8 @@ BOOTSTRAPPING BUILD (STRAP)
<compilationpath>
<pathelement location="${build-strap.dir}/classes/library"/>
<pathelement location="${build-strap.dir}/classes/compiler"/>
- <pathelement location="${build-strap.dir}/classes/partest"/>
<pathelement location="${build-strap.dir}/classes/scalap"/>
+ <pathelement location="${build-strap.dir}/classes/partest"/>
<pathelement location="${ant.jar}"/>
</compilationpath>
</scalacfork>
@@ -1067,6 +1211,7 @@ BOOTSTRAPPING BUILD (STRAP)
<pathelement location="${build-strap.dir}/classes/compiler"/>
<pathelement location="${build-strap.dir}/classes/scalap"/>
<pathelement location="${build-strap.dir}/classes/partest"/>
+ <pathelement location="${scalacheck.jar}"/>
<pathelement location="${ant.jar}"/>
</compilationpath>
</scalacfork>
@@ -1092,6 +1237,13 @@ LIBRARIES (MSIL, FJBG maybe later)
<target name="libs.start"/>
<target name="libs.pre-forkjoin" depends="libs.start">
+ <property name="java6.home" value="/home/linuxsoft/apps/java-1.6"/>
+    <fail message="Compiling forkjoin.jar requires java 1.6. Please set the property `java6.home` in build.properties or by using `-Djava6.home=/path/to/java6`">
+ <condition><not>
+ <available file="${java6.home}/bin/javac"/>
+ </not></condition>
+ </fail>
+
<uptodate property="libs.forkjoin.available" targetfile="${build-libs.dir}/forkjoin.complete">
<srcfiles dir="${src.dir}/forkjoin">
<include name="**/*.java"/>
@@ -1103,6 +1255,9 @@ LIBRARIES (MSIL, FJBG maybe later)
<target name="libs.forkjoin" depends="libs.pre-forkjoin" unless="libs.forkjoin.available">
<mkdir dir="${build-libs.dir}/classes/forkjoin"/>
<javac
+ executable="${java6.home}/bin/javac"
+ fork="yes"
+ compiler="javac1.6"
srcdir="${src.dir}/forkjoin"
destdir="${build-libs.dir}/classes/forkjoin"
classpath="${build-libs.dir}/classes/forkjoin"
@@ -1140,6 +1295,7 @@ LIBRARIES (MSIL, FJBG maybe later)
classpath="${build-libs.dir}/classes/msil"
includes="**/*.java"
excludes="**/tests/**"
+ debug="true"
target="1.5" source="1.4">
<compilerarg line="${javac.args}"/>
</javac>
@@ -1198,8 +1354,10 @@ LIBRARIES (MSIL, FJBG maybe later)
<fileset dir="${build-libs.dir}/classes/fjbg"/>
</jar>
</target>
-
- <target name="libs.done" depends="libs.msilpack, libs.fjbgpack, libs.forkjoinpack"/>
+
+ <target name="libs.done" depends="libs.msilpack, libs.fjbgpack"/>
+
+ <target name="forkjoin.done" depends="libs.forkjoinpack"/>
<target name="libs.clean" depends="pack.clean">
<delete dir="${build-libs.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
@@ -1227,12 +1385,16 @@ DOCUMENTATION
<mkdir dir="${build-docs.dir}/library"/>
<scaladoc
destdir="${build-docs.dir}/library"
- doctitle="Scala ${version.number} API"
+ doctitle="Scala Standard Library"
+ docversion="${version.number}"
+ docsourceurl="https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk/src/"
+ sourcepath="${src.dir}"
classpathref="pack.classpath">
<src>
<files includes="${src.dir}/actors"/>
<files includes="${src.dir}/library/scala"/>
<files includes="${src.dir}/swing"/>
+ <files includes="${src.dir}/continuations/library"/>
</src>
<include name="**/*.scala"/>
<exclude name="reflect/Code.scala"/>
@@ -1247,6 +1409,7 @@ DOCUMENTATION
<exclude name="runtime/ScalaRunTime.scala"/>
<exclude name="runtime/StreamCons.scala"/>
<exclude name="runtime/StringAdd.scala"/>
+ <exclude name="scala/swing/test/**"/>
</scaladoc>
<touch file="${build-docs.dir}/library.complete" verbose="no"/>
<stopwatch name="docs.lib.timer" action="total"/>
@@ -1309,7 +1472,10 @@ DOCUMENTATION
<mkdir dir="${build-docs.dir}/compiler"/>
<scaladoc
destdir="${build-docs.dir}/compiler"
- doctitle="Scala Compiler ${version.number} API"
+ doctitle="Scala Compiler"
+ docversion="${version.number}"
+ docsourceurl="https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk/src/"
+ sourcepath="${src.dir}"
classpathref="pack.classpath"
srcdir="${src.dir}/compiler">
<include name="**/*.scala"/>
@@ -1334,45 +1500,33 @@ BOOTRAPING TEST AND TEST SUITE
<exclude name="**/*.properties"/>
<exclude name="bin/**"/>
<exclude name="*.complete"/>
+ <exclude name="plugins/*.jar"/>
</same>
</target>
- <target name="test.run" depends="pack.done">
- <partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
- timeout="1200000" javaccmd="${javac.cmd}"
- scalacopts="${scalac.args.optimise}">
- <classpath>
- <path refid="pack.classpath"/>
- <fileset dir="${test.dir}/files/lib" includes="*.jar"/>
- </classpath>
- <runtests dir="${test.dir}/files">
- <include name="run/**/*.scala"/>
- <include name="jvm/**/*.scala"/>
- </runtests>
+ <target name="test.suite" depends="pack.done">
+ <partest classpathref="pack.classpath">
+ <env key="PATH" path="${build-pack.dir}/bin:${env.PATH}" />
+ <sysproperty key="partest.srcdir" value="files" />
+ <syspropertyset>
+ <propertyref prefix="partest"/>
+ </syspropertyset>
</partest>
</target>
- <target name="test.suite" depends="pack.done">
- <partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
- timeout="2400000" javaccmd="${javac.cmd}"
- scalacopts="${scalac.args.optimise}">
- <classpath>
- <path refid="pack.classpath"/>
- <fileset dir="${test.dir}/files/lib" includes="*.jar"/>
- </classpath>
- <postests dir="${test.dir}/files/pos" includes="*.scala"/>
- <negtests dir="${test.dir}/files/neg" includes="*.scala"/>
- <runtests dir="${test.dir}/files">
- <include name="run/**/*.scala"/>
- </runtests>
- <jvmtests dir="${test.dir}/files/jvm" includes="*.scala"/>
- <residenttests dir="${test.dir}/files/res" includes="*.res"/>
- <!-- <scripttests dir="${test.dir}/files/script" includes="*.scala"/> -->
- <scalaptests dir="${test.dir}/files/scalap" includes="**/*.scala"/>
- </partest>
+ <target name="test.continuations.suite" depends="pack.done">
+ <partest classpathref="pack.classpath">
+ <env key="PATH" path="${build-pack.dir}/bin:${env.PATH}" />
+ <sysproperty key="partest.srcdir" value="continuations" />
+ <sysproperty key="partest.scalacopts" value="${scalac.args.optimise} -Xpluginsdir ${build-quick.dir}/plugins -Xplugin-require:continuations -P:continuations:enable" />
+ <sysproperty key="partest.runsets" value="neg run" />
+ <syspropertyset>
+ <propertyref prefix="partest"/>
+ </syspropertyset>
+ </partest>
</target>
-
- <target name="test.done" depends="test.suite, test.stability"/>
+
+ <target name="test.done" depends="test.suite, test.continuations.suite, test.stability"/>
<!-- ===========================================================================
DISTRIBUTION
@@ -1396,6 +1550,14 @@ DISTRIBUTION
<chmod perm="ugo+rx" file="${dist.dir}/bin/scaladoc"/>
<chmod perm="ugo+rx" file="${dist.dir}/bin/fsc"/>
<chmod perm="ugo+rx" file="${dist.dir}/bin/scalap"/>
+ <mkdir dir="${dist.dir}/etc"/>
+ <copy toDir="${dist.dir}/etc">
+ <fileset dir="${build-pack.dir}/etc"/>
+ </copy>
+ <mkdir dir="${dist.dir}/plugins"/>
+ <copy toDir="${dist.dir}/plugins">
+ <fileset dir="${build-pack.dir}/plugins"/>
+ </copy>
</target>
<target name="dist.doc" depends="dist.base">
@@ -1430,6 +1592,7 @@ DISTRIBUTION
<jar destfile="${dist.dir}/src/scala-library-src.jar">
<fileset dir="${src.dir}/library"/>
<fileset dir="${src.dir}/actors"/>
+ <fileset dir="${src.dir}/continuations/library"/>
</jar>
<jar destfile="${dist.dir}/src/scala-dbc-src.jar">
<fileset dir="${src.dir}/dbc"/>
@@ -1525,9 +1688,9 @@ STABLE REFERENCE (STARR)
<target name="starr.libs" depends="starr.src" if="libs.outdated">
<copy toDir="${lib.dir}" overwrite="yes">
<fileset dir="${build-libs.dir}">
- <include name="forkjoin.jar"/>
<include name="fjbg.jar"/>
<include name="msil.jar"/>
+ <include name="forkjoin.jar"/>
</fileset>
</copy>
</target>
@@ -1573,7 +1736,7 @@ POSITIONS
<target name="test.positions" depends="quick.comp">
<antcall target="test.positions.tests.sub" inheritRefs="true">
- <param name="test.tests.srcs" value="${test.dir}/files/positions"/>
+ <param name="test.tests.srcs" value="${partest.rootdir}/${partest.srcdir.default}/positions"/>
</antcall>
<antcall target="test.positions.sub" inheritRefs="true">
<param name="test.srcs" value="${src.dir}/compiler"/>
@@ -1597,13 +1760,13 @@ POSITIONS
<param name="test.srcs" value="${src.dir}/scalap"/>
</antcall>
<antcall target="test.positions.tests.sub" inheritRefs="true">
- <param name="test.tests.srcs" value="${test.dir}/files/pos"/>
+ <param name="test.tests.srcs" value="${partest.rootdir}/${partest.srcdir.default}/pos"/>
</antcall>
<antcall target="test.positions.tests.sub" inheritRefs="true">
- <param name="test.tests.srcs" value="${test.dir}/files/run"/>
+ <param name="test.tests.srcs" value="${partest.rootdir}/${partest.srcdir.default}/run"/>
</antcall>
<antcall target="test.positions.tests.sub" inheritRefs="true">
- <param name="test.tests.srcs" value="${test.dir}/files/neg"/>
+ <param name="test.tests.srcs" value="${partest.rootdir}/${partest.srcdir.default}/neg"/>
</antcall>
</target>
diff --git a/docs/examples/parsing/lambda/TestParser.scala b/docs/examples/parsing/lambda/TestParser.scala
index 22257c1731..623b597337 100644
--- a/docs/examples/parsing/lambda/TestParser.scala
+++ b/docs/examples/parsing/lambda/TestParser.scala
@@ -20,7 +20,7 @@ trait TestParser extends StdTokenParsers with ImplicitConversions with TestSynt
def name : Parser[Name] = ident ^^ Name
- // meaning of the argumens to the closure during subsequent iterations
+ // meaning of the arguments to the closure during subsequent iterations
// (...(expr2 op1 expr1) ... op1 expr1)
// ^a^^^ ^o^ ^b^^^
// ^^^^^^^a^^^^^^^ ^o^ ^^b^^
diff --git a/docs/examples/pilib/elasticBuffer.scala b/docs/examples/pilib/elasticBuffer.scala
index e156cafbc2..a0e8bb6a7c 100644
--- a/docs/examples/pilib/elasticBuffer.scala
+++ b/docs/examples/pilib/elasticBuffer.scala
@@ -64,7 +64,7 @@ object elasticBuffer {
def Consumer(get: Chan[String]): Unit = {
Thread.sleep(1 + random.nextInt(1000))
val msg = get.read
- System.out.println("Consummer took " + msg)
+ System.out.println("Consumer took " + msg)
Consumer(get)
}
diff --git a/docs/examples/pilib/mobilePhoneProtocol.scala b/docs/examples/pilib/mobilePhoneProtocol.scala
index 385836318b..0805253ae0 100644
--- a/docs/examples/pilib/mobilePhoneProtocol.scala
+++ b/docs/examples/pilib/mobilePhoneProtocol.scala
@@ -158,7 +158,7 @@ object mobilePhoneProtocol {
def Consumer(get: Chan[String]): unit = {
Thread.sleep(1 + random.nextInt(1000));
val msg = get.read;
- System.out.println("Consummer took " + msg);
+ System.out.println("Consumer took " + msg);
Consumer(get)
}
diff --git a/docs/examples/plugintemplate/src/plugintemplate/standalone/Main.scala b/docs/examples/plugintemplate/src/plugintemplate/standalone/Main.scala
index 53e5e40104..bea6eeee26 100644
--- a/docs/examples/plugintemplate/src/plugintemplate/standalone/Main.scala
+++ b/docs/examples/plugintemplate/src/plugintemplate/standalone/Main.scala
@@ -17,7 +17,7 @@ object Main {
val command = new CompilerCommand(args.toList, settings, println, false) {
/** The command name that will be printed in in the usage message.
- * This is autmatically set to the value of 'plugin.commandname' in the
+ * This is automatically set to the value of 'plugin.commandname' in the
* file build.properties.
*/
override val cmdName = PluginProperties.pluginCommand
@@ -26,7 +26,7 @@ object Main {
if (!command.ok)
return()
- /** The version number of this plugin is read from the porperties file
+ /** The version number of this plugin is read from the properties file
*/
if (settings.version.value) {
println(command.cmdName +" version "+ PluginProperties.versionString)
diff --git a/lib/ScalaCheck.jar b/lib/ScalaCheck.jar
deleted file mode 120000
index 587b63a458..0000000000
--- a/lib/ScalaCheck.jar
+++ /dev/null
@@ -1 +0,0 @@
-scalacheck-1.6dev.jar \ No newline at end of file
diff --git a/lib/fjbg.jar.desired.sha1 b/lib/fjbg.jar.desired.sha1
index 059817251c..04db587a17 100644
--- a/lib/fjbg.jar.desired.sha1
+++ b/lib/fjbg.jar.desired.sha1
@@ -1 +1 @@
-6ef6a21997d01d64a3ff8447a0e110d04b3d6c7d ?fjbg.jar
+bfbfc87845d5bb3402bac61f03734f362f6554b6 ?fjbg.jar
diff --git a/lib/msil.jar.desired.sha1 b/lib/msil.jar.desired.sha1
index 20dd30e75c..cfdfc0b26e 100644
--- a/lib/msil.jar.desired.sha1
+++ b/lib/msil.jar.desired.sha1
@@ -1 +1 @@
-07c906973c7082c3a958e4e56793c005d6404e50 ?msil.jar
+009cec9efeb8f9b9287e49da1c4b69e1a629b299 ?msil.jar
diff --git a/lib/scala-compiler.jar.desired.sha1 b/lib/scala-compiler.jar.desired.sha1
index 2c79854df5..dc48e2f9ae 100644
--- a/lib/scala-compiler.jar.desired.sha1
+++ b/lib/scala-compiler.jar.desired.sha1
@@ -1 +1 @@
-0ab0a9ff4350a7bfc7bbf89d5cab73c9875fae27 ?scala-compiler.jar
+58217106efbefca262416284da22d8f935c0d5a6 ?scala-compiler.jar
diff --git a/lib/scala-library-src.jar.desired.sha1 b/lib/scala-library-src.jar.desired.sha1
index a93fbbf4ea..57f522e23d 100644
--- a/lib/scala-library-src.jar.desired.sha1
+++ b/lib/scala-library-src.jar.desired.sha1
@@ -1 +1 @@
-a62961db6716add97af1095eba07d1f8405cc293 ?scala-library-src.jar
+084aab3593eb7fbc5ffe68c3af565d258146a85b ?scala-library-src.jar
diff --git a/lib/scala-library.jar.desired.sha1 b/lib/scala-library.jar.desired.sha1
index 51969e7802..842de98724 100644
--- a/lib/scala-library.jar.desired.sha1
+++ b/lib/scala-library.jar.desired.sha1
@@ -1 +1 @@
-5b5ef44fef590bf98e3b5c0cade8b333d0f629e3 ?scala-library.jar
+413b018f76f6684e9eb9dc04ea21097e3c59aaf7 ?scala-library.jar
diff --git a/lib/scalacheck-1.6dev.jar.desired.sha1 b/lib/scalacheck-1.6dev.jar.desired.sha1
deleted file mode 100644
index 810b5d7f8e..0000000000
--- a/lib/scalacheck-1.6dev.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-41a805f4ccfab57be082e73f9de416fe6028d694 ?scalacheck-1.6dev.jar
diff --git a/lib/scalacheck.jar.desired.sha1 b/lib/scalacheck.jar.desired.sha1
new file mode 100644
index 0000000000..ed9c46c3db
--- /dev/null
+++ b/lib/scalacheck.jar.desired.sha1
@@ -0,0 +1 @@
+4c76385b1a9cb7cd619739776b940d98c4aadc6d ?scalacheck.jar
diff --git a/scala-lang.ipr b/scala-lang.ipr
deleted file mode 100644
index 750442f47d..0000000000
--- a/scala-lang.ipr
+++ /dev/null
@@ -1,1527 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
- <component name="AntConfiguration">
- <defaultAnt bundledAnt="true" />
- <buildFile url="file://$PROJECT_DIR$/build.xml">
- <additionalClassPath />
- <antReference projectDefault="true" />
- <customJdkName value="" />
- <maximumHeapSize value="128" />
- <maximumStackSize value="32" />
- <properties />
- </buildFile>
- </component>
- <component name="BuildJarProjectSettings">
- <option name="BUILD_JARS_ON_MAKE" value="false" />
- </component>
- <component name="CodeStyleSettingsManager">
- <option name="PER_PROJECT_SETTINGS">
- <value>
- <ADDITIONAL_INDENT_OPTIONS fileType="java">
- <option name="INDENT_SIZE" value="4" />
- <option name="CONTINUATION_INDENT_SIZE" value="8" />
- <option name="TAB_SIZE" value="4" />
- <option name="USE_TAB_CHARACTER" value="false" />
- <option name="SMART_TABS" value="false" />
- <option name="LABEL_INDENT_SIZE" value="0" />
- <option name="LABEL_INDENT_ABSOLUTE" value="false" />
- </ADDITIONAL_INDENT_OPTIONS>
- <ADDITIONAL_INDENT_OPTIONS fileType="jsp">
- <option name="INDENT_SIZE" value="4" />
- <option name="CONTINUATION_INDENT_SIZE" value="8" />
- <option name="TAB_SIZE" value="4" />
- <option name="USE_TAB_CHARACTER" value="false" />
- <option name="SMART_TABS" value="false" />
- <option name="LABEL_INDENT_SIZE" value="0" />
- <option name="LABEL_INDENT_ABSOLUTE" value="false" />
- </ADDITIONAL_INDENT_OPTIONS>
- <ADDITIONAL_INDENT_OPTIONS fileType="xml">
- <option name="INDENT_SIZE" value="4" />
- <option name="CONTINUATION_INDENT_SIZE" value="8" />
- <option name="TAB_SIZE" value="4" />
- <option name="USE_TAB_CHARACTER" value="false" />
- <option name="SMART_TABS" value="false" />
- <option name="LABEL_INDENT_SIZE" value="0" />
- <option name="LABEL_INDENT_ABSOLUTE" value="false" />
- </ADDITIONAL_INDENT_OPTIONS>
- </value>
- </option>
- </component>
- <component name="CompilerAPISettings">
- <option name="DEBUGGING_INFO" value="true" />
- <option name="GENERATE_NO_WARNINGS" value="false" />
- <option name="DEPRECATION" value="true" />
- <option name="ADDITIONAL_OPTIONS_STRING" value="" />
- <option name="MAXIMUM_HEAP_SIZE" value="128" />
- </component>
- <component name="CompilerConfiguration">
- <option name="DEFAULT_COMPILER" value="Javac" />
- <resourceExtensions>
- <entry name=".+\.(properties|xml|html|dtd|tld)" />
- <entry name=".+\.(gif|png|jpeg|jpg)" />
- </resourceExtensions>
- <wildcardResourcePatterns>
- <entry name="?*.properties" />
- <entry name="?*.xml" />
- <entry name="?*.gif" />
- <entry name="?*.png" />
- <entry name="?*.jpeg" />
- <entry name="?*.jpg" />
- <entry name="?*.html" />
- <entry name="?*.dtd" />
- <entry name="?*.tld" />
- <entry name="?*.ftl" />
- </wildcardResourcePatterns>
- <annotationProcessing enabled="false" useClasspath="true" />
- </component>
- <component name="CopyrightManager" default="">
- <module2copyright />
- </component>
- <component name="DependencyValidationManager">
- <option name="SKIP_IMPORT_STATEMENTS" value="false" />
- </component>
- <component name="EclipseCompilerSettings">
- <option name="DEBUGGING_INFO" value="true" />
- <option name="GENERATE_NO_WARNINGS" value="true" />
- <option name="DEPRECATION" value="false" />
- <option name="ADDITIONAL_OPTIONS_STRING" value="" />
- <option name="MAXIMUM_HEAP_SIZE" value="128" />
- </component>
- <component name="EclipseEmbeddedCompilerSettings">
- <option name="DEBUGGING_INFO" value="true" />
- <option name="GENERATE_NO_WARNINGS" value="true" />
- <option name="DEPRECATION" value="false" />
- <option name="ADDITIONAL_OPTIONS_STRING" value="" />
- <option name="MAXIMUM_HEAP_SIZE" value="128" />
- </component>
- <component name="Encoding" useUTFGuessing="true" native2AsciiForPropertiesFiles="false">
- <file url="file://$PROJECT_DIR$/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala" charset="UTF-8" />
- </component>
- <component name="FacetAutodetectingManager">
- <autodetection-disabled>
- <facet-type id="Scala">
- <modules>
- <module name="files">
- <files>
- <file url="file://$PROJECT_DIR$/test/files/android/HelloAndroid.scala" />
- <file url="file://$PROJECT_DIR$/test/files/ant/fsc.scala" />
- <file url="file://$PROJECT_DIR$/test/files/ant/scalac.scala" />
- <file url="file://$PROJECT_DIR$/test/files/ant/scaladoc.scala" />
- <file url="file://$PROJECT_DIR$/test/files/cldc/randoms.scala" />
- <file url="file://$PROJECT_DIR$/test/files/cli/test1/Main.check.scala" />
- <file url="file://$PROJECT_DIR$/test/files/cli/test1/Main.scala" />
- <file url="file://$PROJECT_DIR$/test/files/cli/test2/Main.check.scala" />
- <file url="file://$PROJECT_DIR$/test/files/cli/test2/Main.scala" />
- <file url="file://$PROJECT_DIR$/test/files/cli/test3/Main.check.scala" />
- <file url="file://$PROJECT_DIR$/test/files/cli/test3/Main.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/JavaInteraction.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/bigints.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/bug560bis.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/inner.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/manifests.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/methvsfield.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/natives.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/nest.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/protectedacc.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/serialization.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/t0632.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/t1116.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/t1143-2/t1143-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/t1143.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/typerep.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/unittest_io.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/unittest_xml.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/xml01.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/xml02.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/xml03syntax.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/xml04embed.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/xmlattr.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/xmlmore.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/xmlpull.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm/xmlstuff.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/annotations.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/bug676.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/bug680.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/console.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/genericNest.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/interpreter.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/outerEnum.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/stringbuilder.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/sync-var.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/t0014.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/t1461.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/t1464/MyTrait.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/throws-annot.scala" />
- <file url="file://$PROJECT_DIR$/test/files/jvm5/typerep.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/abstract.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/accesses.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/annot-nonconst.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/badtok-1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/badtok-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/badtok-3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1010.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1011.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1017.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1041.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1106.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1112.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug112706A.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1181.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1183.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1224.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1241.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1275.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1392.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1523.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1623.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug1838.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug200.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug276.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug278.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug284.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug343.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug391.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug409.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug412.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug414.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug418.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug421.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug452.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug473.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug500.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug501.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug510.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug512.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug515.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug520.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug521.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug545.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug550.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug555.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug556.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug558.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug562.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug563.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug565.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug576.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug585.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug588.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug591.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug593.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug608.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug630.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug631.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug633.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug639.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug649.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug650.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug663.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug664.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug667.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug668.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug677.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug691.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug692.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug693.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug696.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug700.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug708.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug712.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug715.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug729.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug752.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug765.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug766.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug779.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug783.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug798.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug800.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug835.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug836.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug845.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug846.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug856.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug875.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug876.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug877.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug882.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug900.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug908.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug909.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug910.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug935.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug944.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug960.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug961.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug987.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/bug997.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/checksensible.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/constrs.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/cyclics.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/divergent-implicit.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/faculty.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/forward.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/gadts1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/higherkind_novalue.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/imp2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/implicits.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/lazy-override.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/lazyvals.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/lubs.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/mixins.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/multi-array.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/nopredefs.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/null-unsoundness.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/overload.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/parstar.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/patmatexhaust.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/patternalts.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-after-terminal/src/ThePlugin.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-after-terminal/testsource.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-before-parser/src/ThePlugin.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-before-parser/testsource.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-cyclic-dependency/src/ThePlugin.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-cyclic-dependency/testsource.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-multiple-rafter/src/ThePlugin.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-multiple-rafter/testsource.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-rafter-before-1/src/ThePlugin.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-rafter-before-1/testsource.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-rightafter-terminal/src/ThePlugin.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/plugin-rightafter-terminal/testsource.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/sabin2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/saito.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/sensitive.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/structural.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/switch.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0003.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0015.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0117.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0152.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0204.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0207.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0209.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0214.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0218.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0226.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0259.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0345.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0351.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0503.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0528neg.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0590.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0606.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0673/Test.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0699/A.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0699/B.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0764.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0842.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0899.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t0903.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t1009.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t1033.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t1049.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t1163.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t1168.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t1215.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t1371.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/t1659.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/tailrec.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/tcpoly_bounds.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/tcpoly_override.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/tcpoly_typealias.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/tcpoly_variance.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/tcpoly_variance_enforce.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/typeerror.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/unreachablechar.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/variances.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/viewtest.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/volatile-intersection.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/volatile.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/wellkinded_app.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/wellkinded_app2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/wellkinded_bounds.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/wellkinded_wrongarity.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/wellkinded_wrongarity2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/xmlcorner.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/xmltruncated1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/xmltruncated2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/xmltruncated3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/xmltruncated4.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/xmltruncated5.scala" />
- <file url="file://$PROJECT_DIR$/test/files/neg/xmltruncated6.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/A.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/List1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/MailBox.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/NoCyclicReference.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/S1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/S3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/S5.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/S8.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/Transactions.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/X.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/Z.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/abstract.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/aliases.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/annot-inner.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/annotations.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/arrays2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/attributes.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bounds.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0002.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0017.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0020.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0029.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0030.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0031.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0032.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0036.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0039.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0049.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0053.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0054.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0061.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0064.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0066.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0068.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0069.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0076.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0081.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0082.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0085.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0091.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0093.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0123.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0204.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0304.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0325.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0422.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0599.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug0646.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1000.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1001.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1006.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1014.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1034.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1049.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1050.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1056.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1070.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1075.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1085.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1087.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1090.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1107.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1119.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1123.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug112606A.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1136.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug115.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug116.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1168.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1185.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug119.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1203.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug121.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1210.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1210a.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug122.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1237.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug124.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1241.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1279a.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1292.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1385.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug151.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1565.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug159.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug160.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug175.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug177.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug183.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug1858.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug201.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug210.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug211.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug229.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug245.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug247.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug262.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug267.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug284.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug287.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug289.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug295.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug296.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug304.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug318.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug319.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug342.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug344.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug348plus.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug359.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug360.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug361.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug372.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug374.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug389.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug397.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug402.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug404.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug415.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug419.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug422.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug430-feb09.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug430.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug432.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug439.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug443.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug460.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug514.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug516.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug522.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug530.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug531.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug532.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug533.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug566.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug577.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug592.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug595.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug596.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug599.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug602.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug604.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug607.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug611.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug613.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug615.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug616.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug628.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug640.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug651.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug661.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug675.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug684.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug690.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug694.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug697.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug698.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug703.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug704.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug711.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug720.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug756.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug757.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug757a.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug758.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug759.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug762.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug767.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug780.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug788.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug789.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug796.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug802.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug803.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug805.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug807.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug812.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug839.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug851.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug873.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug880.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug892.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug911.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug927.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/bug946.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/builders.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/caseaccs.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/cfcrash.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/channels.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/cls.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/cls1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/clsrefine.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/code.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/collections.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/comp-rec-test.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/compile.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/compile1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/compound.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/constfold.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/contrib467.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/contrib701.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/cyclics.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/escapes2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/eta.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/exceptions.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/functions.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/gadt-gilles.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/gadts2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/gosh.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/gui.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/homonym.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/imp2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/imports.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/infer.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/infer2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/inferbroadtype.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/init.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/itay.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/jesper.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/kinzer.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/ksbug1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/lambda.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/lambdalift.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/lambdalift1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/largecasetest.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/listpattern.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/localmodules.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/looping-jsig.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/manifest1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/matchStarlift.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/matthias1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/matthias3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/matthias4.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/matthias5.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/maxim1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/michel1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/michel2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/michel3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/michel4.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/michel5.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/michel6.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/mixins.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/modules.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/modules1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/moduletrans.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/needstypeearly.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/nested.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/nested2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/null.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/nullary.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/nullary_poly.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/override.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/partialfun.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/pat_gilles.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/pat_iuli.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/patterns.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/patterns1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/patterns1213.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/patterns2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/patterns3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/philippe1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/philippe2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/philippe3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/philippe4.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/pmbug.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/proj-rec-test.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/propagate.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/protected-t1010.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/rebind.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/return_thistype.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/scoping1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/scoping2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/scoping3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/seqtest2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/simplelists.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/stable.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/strings.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/sudoku.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0055.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0154.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0165.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0227.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0231.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0273.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0288/Foo.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0301.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0438.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0453.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0504.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0586.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0591.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0651.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0654.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0674.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0710.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0770.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0774/deathname.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0774/unrelated.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0786.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0851.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0872.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0904.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0905.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t0999.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1001.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1027.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1049.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1059.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1087.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1107/O.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1107/T.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1131.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1146.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1147.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1159.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1164.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1280.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1380/hallo.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1391.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1409/ConcreteImpl.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1438.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1439.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1480.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1648.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1675.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1761.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1789.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/t1840/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_boundedmonad.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_bounds1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_checkkinds_mix.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_gm.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_higherorder_bound_method.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_late_method_params.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_method.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_overloaded.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_param_scoping.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_poly.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_return_overriding.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_seq.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_seq_typealias.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_subst.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_typeapp.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_typesub.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_variance.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tcpoly_wildcards.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/ted.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/test1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/test2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/test4.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/test4a.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/test4refine.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/test5.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/test5refine.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/testcast.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/thistype.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/thistypes.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/ticket0137.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tinondefcons.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/traits.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/tryexpr.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/typealias_dubious.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/typealiases.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/typerep-stephane.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/typerep.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/typesafecons.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/unapply.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/unapplyComplex.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/unapplyContexts2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/unapplyGeneric.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/unapplyNeedsMemberType.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/unapplySeq.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/unapplyVal.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/unicode-decode.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/valdefs.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/variances.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/viewtest1.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos/viewtest2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/chang/Test.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/ilya/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/ilya2/A.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/signatures/sig.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t0695/Test.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1101/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1102/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1150/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1152/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1176/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1196/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1197/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1203/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1230/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1231/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1232/S.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1263/test.scala" />
- <file url="file://$PROJECT_DIR$/test/files/pos5/t1711/Seq.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug597/Main.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug597/Test.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug687/QueryA.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug687/QueryB.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug722/IfElse.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug722/Parser.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug722/ScanBased.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug735/ScalaExpressions.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug735/ScalaTyper.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug743/BracesXXX.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug743/ParserXXX.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug785/ScalaNewTyper.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug785/ScalaTrees.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug831/NewScalaParserXXX.scala" />
- <file url="file://$PROJECT_DIR$/test/files/res/bug831/NewScalaTestXXX.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-01.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-02.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-03.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-04.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-05.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-06.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-07.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-08.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-09.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-10.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/Course-2002-13.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/NestedClasses.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/absoverride.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/amp.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/arrays.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/arybufgrow.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bitsets.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/boolexprs.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/boolord.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bridges.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug0325.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug1074.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug1192.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug1220.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug216.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug405.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug428.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug429.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug594.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug601.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug603.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug627.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug629.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug657.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug744.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug889.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug920.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug949.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bug978.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/bugs.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/byname.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/caseclasses.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/checked.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/classof.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/collection-stacks.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/collections.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/colltest.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/complicatedmatch.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/constrained-types.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/constructors.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/contrib674.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/ctor-order.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/deeps.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/docgenerator.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/enums.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/exceptions-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/exceptions.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/existentials.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/exoticnames.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/fors.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/forvaleq.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/gadts.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/impconvtimes.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/implicits.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/imports.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/infiniteloop.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/infix.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/intmap.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/iq.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/issue192.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/iterables.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/iterators.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/json.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/jtptest.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/lazy-exprs.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/lazy-locals.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/lazy-override.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/lazy-traits.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/lisp.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/lists.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/literals.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/map_test.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/matcharraytail.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/matchbytes.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/matchemptyarray.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/matchintasany.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/matchonstream.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/misc.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/missingparams.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/mixins.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/multi-array.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/overloads.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/patmatnew.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/promotion.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/randomAccessSeq-apply.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/range.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/retclosure.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/retsynch.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/richs.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/runtime-richChar.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/runtime.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/slices.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/sort.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/streams.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/structural.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/syncchannel.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0005.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0017.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0042.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0091.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0412.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0421.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0485.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0486.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0508.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0528.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0607.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0631.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0663.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0668.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0677.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0700.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0807.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0883.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0911.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t0936.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1323.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1368.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1423.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1500.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1501.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1505.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1524.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1535.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1618.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1620.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1718.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1747.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/t1829.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/tailcalls.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/tcpoly_monads.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/tcpoly_overriding.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/tcpoly_parseridioms.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/try-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/try.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/tuples.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/typealias_overriding.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/unapply.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/unapplyArray.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/unboxingBug.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/unittest_collection.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/unittest_io.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/verify-ctor.scala" />
- <file url="file://$PROJECT_DIR$/test/files/run/withIndex.scala" />
- <file url="file://$PROJECT_DIR$/test/files/scalacheck/list.scala" />
- <file url="file://$PROJECT_DIR$/test/files/script/fact.scala" />
- <file url="file://$PROJECT_DIR$/test/files/script/second.scala" />
- <file url="file://$PROJECT_DIR$/test/files/script/t1015.scala" />
- <file url="file://$PROJECT_DIR$/test/files/script/t1017.scala" />
- <file url="file://$PROJECT_DIR$/test/files/script/utf8.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/ackermann.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/ary.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/binarytrees.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/binarytrees.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/binarytrees.scala-3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/chameneos.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/except.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/fannkuch.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/fannkuch.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/fibo.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/harmonic.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/hash.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/hash2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/hello.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/knucleotide.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/knucleotide.scala-3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/lists.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/message.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/nbody.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/nestedloop.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/nsieve.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/nsieve.scala-3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/nsievebits.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/nsievebits.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/nsievebits.scala-3.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/objinst.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/partialsums.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/partialsums.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/recursive.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/regexdna.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/spectralnorm.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/strcat.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/sumcol.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/sumcol.scala-2.scala" />
- <file url="file://$PROJECT_DIR$/test/files/shootout/takfp.scala" />
- </files>
- </module>
- <module name="library">
- <files>
- <file url="file://$PROJECT_DIR$/src/android-library/scala/ScalaObject.scala" />
- <file url="file://$PROJECT_DIR$/src/android-library/scala/reflect/ScalaBeanInfo.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/Application.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/BigDecimal.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/BigInt.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/Console.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/Math.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/Numeric.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/Ordering.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/Predef.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/Range.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/Symbol.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/collection/JavaConversions.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/collection/immutable/List.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/collection/immutable/PagedSeq.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/collection/mutable/OpenHashMap.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/collection/mutable/StringBuilder.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/collection/mutable/WeakHashMap.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/compat/Platform.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/concurrent/DelayedLazyVal.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/concurrent/jolib.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/concurrent/ops.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/concurrent/pilib.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/io/BufferedSource.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/io/BytePickle.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/io/Codec.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/io/File.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/io/Position.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/io/Source.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/io/UTF8Codec.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/mobile/Code.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/mobile/Location.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/net/Utility.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/ref/PhantomReference.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/ref/ReferenceQueue.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/ref/ReferenceWrapper.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/ref/SoftReference.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/ref/WeakReference.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/reflect/Invocation.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/reflect/Manifest.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/reflect/ScalaBeanInfo.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/MethodCache.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/NonLocalReturnException.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/RichChar.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/RichClass.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/RichDouble.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/RichException.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/RichFloat.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/RichInt.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/RichLong.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/RichString.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/runtime/StringAdd.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/testing/Benchmark.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/testing/SUnit.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/testing/Show.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/text/Document.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/ClassLoader.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/DynamicVariable.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/Marshal.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/NameTransformer.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/Properties.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/Random.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/automata/BaseBerrySethi.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/automata/DetWordAutom.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/automata/Inclusion.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/automata/NondetWordAutom.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/automata/SubsetConstruction.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/automata/WordBerrySethi.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/control/Exception.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/grammar/HedgeRHS.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/grammar/TreeRHS.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/logging/ConsoleLogger.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/logging/Logged.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/matching/Regex.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/ast/AbstractSyntax.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/ast/Binders.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/ImplicitConversions.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/JavaTokenParsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/PackratParsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/Parsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/RegexParsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/lexical/Lexical.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/lexical/Scanners.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/lexical/StdLexical.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/syntactical/TokenParsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/testing/RegexTest.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinator/testing/Tester.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/$tilde.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/ImplicitConversions.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/Parsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/lexical/Lexical.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/lexical/Scanners.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/lexical/StdLexical.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/syntactical/BindingParsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/syntactical/StdTokenParsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/syntactical/TokenParsers.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/combinatorold/testing/Tester.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/CharArrayPosition.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/CharArrayReader.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/CharSequenceReader.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/NoPosition.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/OffsetPosition.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/PagedSeqReader.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/Position.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/Positional.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/Reader.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/input/StreamReader.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/json/JSON.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/json/Lexer.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/json/Parser.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/syntax/StdTokens.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/parsing/syntax/Tokens.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/regexp/Base.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/regexp/PointedHedgeExp.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/regexp/SyntaxError.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/util/regexp/WordExp.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Atom.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Attribute.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Comment.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Document.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Elem.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/EntityRef.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Group.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/HasKeyValue.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/MalformedAttributeException.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/MetaData.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/NamespaceBinding.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Node.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/NodeBuffer.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/NodeSeq.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/NodeTraverser.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Null.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/PCData.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Parsing.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/PrefixedAttribute.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/PrettyPrinter.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/ProcInstr.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/QNode.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/SpecialNode.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Text.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/TextBuffer.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/TopScope.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/TypeSymbol.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Unparsed.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/UnprefixedAttribute.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Utility.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/XML.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/Xhtml.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/ContentModel.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/ContentModelParser.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/DTD.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/Decl.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/DocType.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/DtdTypeSymbol.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/ElementValidator.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/ExternalID.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/Scanner.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/Tokens.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/dtd/ValidationException.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/factory/Binder.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/factory/LoggedNodeFactory.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/factory/NodeFactory.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/factory/XMLLoader.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/include/CircularIncludeException.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/include/UnavailableResourceException.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/include/XIncludeException.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/include/sax/EncodingHeuristics.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/include/sax/Main.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/include/sax/XIncludeFilter.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/include/sax/XIncluder.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/ConstructingHandler.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/ConstructingParser.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/DefaultMarkupHandler.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/ExternalSources.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/FactoryAdapter.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/FatalError.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/MarkupHandler.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/MarkupParser.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/NoBindingFactoryAdapter.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/TokenTests.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/ValidatingMarkupHandler.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/XhtmlEntities.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/parsing/XhtmlParser.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/path/Expression.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/persistent/CachedFileStorage.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/persistent/Index.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/persistent/IndexedStorage.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/persistent/SetStorage.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/pull/XMLEvent.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/pull/XMLEventReader.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/transform/BasicTransformer.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/transform/RewriteRule.scala" />
- <file url="file://$PROJECT_DIR$/src/dotnet-library/scala/xml/transform/RuleTransformer.scala" />
- <file url="file://$PROJECT_DIR$/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala" />
- </files>
- </module>
- </modules>
- </facet-type>
- </autodetection-disabled>
- </component>
- <component name="IdProvider" IDEtalkID="522B472C2EA573563CC2BA93160270BE" />
- <component name="InspectionProjectProfileManager">
- <list size="5">
- <item index="0" class="java.lang.String" itemvalue="TYPO" />
- <item index="1" class="java.lang.String" itemvalue="SERVER PROBLEM" />
- <item index="2" class="java.lang.String" itemvalue="INFO" />
- <item index="3" class="java.lang.String" itemvalue="WARNING" />
- <item index="4" class="java.lang.String" itemvalue="ERROR" />
- </list>
- </component>
- <component name="JavacSettings">
- <option name="DEBUGGING_INFO" value="true" />
- <option name="GENERATE_NO_WARNINGS" value="false" />
- <option name="DEPRECATION" value="true" />
- <option name="ADDITIONAL_OPTIONS_STRING" value="" />
- <option name="MAXIMUM_HEAP_SIZE" value="128" />
- </component>
- <component name="JavadocGenerationManager">
- <option name="OUTPUT_DIRECTORY" />
- <option name="OPTION_SCOPE" value="protected" />
- <option name="OPTION_HIERARCHY" value="true" />
- <option name="OPTION_NAVIGATOR" value="true" />
- <option name="OPTION_INDEX" value="true" />
- <option name="OPTION_SEPARATE_INDEX" value="true" />
- <option name="OPTION_DOCUMENT_TAG_USE" value="false" />
- <option name="OPTION_DOCUMENT_TAG_AUTHOR" value="false" />
- <option name="OPTION_DOCUMENT_TAG_VERSION" value="false" />
- <option name="OPTION_DOCUMENT_TAG_DEPRECATED" value="true" />
- <option name="OPTION_DEPRECATED_LIST" value="true" />
- <option name="OTHER_OPTIONS" value="" />
- <option name="HEAP_SIZE" />
- <option name="LOCALE" />
- <option name="OPEN_IN_BROWSER" value="true" />
- </component>
- <component name="JikesSettings">
- <option name="JIKES_PATH" value="" />
- <option name="DEBUGGING_INFO" value="true" />
- <option name="DEPRECATION" value="true" />
- <option name="GENERATE_NO_WARNINGS" value="false" />
- <option name="IS_EMACS_ERRORS_MODE" value="true" />
- <option name="ADDITIONAL_OPTIONS_STRING" value="" />
- </component>
- <component name="Palette2">
- <group name="Swing">
- <item class="com.intellij.uiDesigner.HSpacer" tooltip-text="Horizontal Spacer" icon="/com/intellij/uiDesigner/icons/hspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="1" hsize-policy="6" anchor="0" fill="1" />
- </item>
- <item class="com.intellij.uiDesigner.VSpacer" tooltip-text="Vertical Spacer" icon="/com/intellij/uiDesigner/icons/vspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="6" hsize-policy="1" anchor="0" fill="2" />
- </item>
- <item class="javax.swing.JPanel" icon="/com/intellij/uiDesigner/icons/panel.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3" />
- </item>
- <item class="javax.swing.JScrollPane" icon="/com/intellij/uiDesigner/icons/scrollPane.png" removable="false" auto-create-binding="false" can-attach-label="true">
- <default-constraints vsize-policy="7" hsize-policy="7" anchor="0" fill="3" />
- </item>
- <item class="javax.swing.JButton" icon="/com/intellij/uiDesigner/icons/button.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="3" anchor="0" fill="1" />
- <initial-values>
- <property name="text" value="Button" />
- </initial-values>
- </item>
- <item class="javax.swing.JRadioButton" icon="/com/intellij/uiDesigner/icons/radioButton.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
- <initial-values>
- <property name="text" value="RadioButton" />
- </initial-values>
- </item>
- <item class="javax.swing.JCheckBox" icon="/com/intellij/uiDesigner/icons/checkBox.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
- <initial-values>
- <property name="text" value="CheckBox" />
- </initial-values>
- </item>
- <item class="javax.swing.JLabel" icon="/com/intellij/uiDesigner/icons/label.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="0" anchor="8" fill="0" />
- <initial-values>
- <property name="text" value="Label" />
- </initial-values>
- </item>
- <item class="javax.swing.JTextField" icon="/com/intellij/uiDesigner/icons/textField.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
- <preferred-size width="150" height="-1" />
- </default-constraints>
- </item>
- <item class="javax.swing.JPasswordField" icon="/com/intellij/uiDesigner/icons/passwordField.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
- <preferred-size width="150" height="-1" />
- </default-constraints>
- </item>
- <item class="javax.swing.JFormattedTextField" icon="/com/intellij/uiDesigner/icons/formattedTextField.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
- <preferred-size width="150" height="-1" />
- </default-constraints>
- </item>
- <item class="javax.swing.JTextArea" icon="/com/intellij/uiDesigner/icons/textArea.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
- <preferred-size width="150" height="50" />
- </default-constraints>
- </item>
- <item class="javax.swing.JTextPane" icon="/com/intellij/uiDesigner/icons/textPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
- <preferred-size width="150" height="50" />
- </default-constraints>
- </item>
- <item class="javax.swing.JEditorPane" icon="/com/intellij/uiDesigner/icons/editorPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
- <preferred-size width="150" height="50" />
- </default-constraints>
- </item>
- <item class="javax.swing.JComboBox" icon="/com/intellij/uiDesigner/icons/comboBox.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="0" hsize-policy="2" anchor="8" fill="1" />
- </item>
- <item class="javax.swing.JTable" icon="/com/intellij/uiDesigner/icons/table.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
- <preferred-size width="150" height="50" />
- </default-constraints>
- </item>
- <item class="javax.swing.JList" icon="/com/intellij/uiDesigner/icons/list.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="6" hsize-policy="2" anchor="0" fill="3">
- <preferred-size width="150" height="50" />
- </default-constraints>
- </item>
- <item class="javax.swing.JTree" icon="/com/intellij/uiDesigner/icons/tree.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
- <preferred-size width="150" height="50" />
- </default-constraints>
- </item>
- <item class="javax.swing.JTabbedPane" icon="/com/intellij/uiDesigner/icons/tabbedPane.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
- <preferred-size width="200" height="200" />
- </default-constraints>
- </item>
- <item class="javax.swing.JSplitPane" icon="/com/intellij/uiDesigner/icons/splitPane.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
- <preferred-size width="200" height="200" />
- </default-constraints>
- </item>
- <item class="javax.swing.JSpinner" icon="/com/intellij/uiDesigner/icons/spinner.png" removable="false" auto-create-binding="true" can-attach-label="true">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
- </item>
- <item class="javax.swing.JSlider" icon="/com/intellij/uiDesigner/icons/slider.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
- </item>
- <item class="javax.swing.JSeparator" icon="/com/intellij/uiDesigner/icons/separator.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3" />
- </item>
- <item class="javax.swing.JProgressBar" icon="/com/intellij/uiDesigner/icons/progressbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1" />
- </item>
- <item class="javax.swing.JToolBar" icon="/com/intellij/uiDesigner/icons/toolbar.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1">
- <preferred-size width="-1" height="20" />
- </default-constraints>
- </item>
- <item class="javax.swing.JToolBar$Separator" icon="/com/intellij/uiDesigner/icons/toolbarSeparator.png" removable="false" auto-create-binding="false" can-attach-label="false">
- <default-constraints vsize-policy="0" hsize-policy="0" anchor="0" fill="1" />
- </item>
- <item class="javax.swing.JScrollBar" icon="/com/intellij/uiDesigner/icons/scrollbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
- <default-constraints vsize-policy="6" hsize-policy="0" anchor="0" fill="2" />
- </item>
- </group>
- </component>
- <component name="ProjectDetails">
- <option name="projectName" value="scala-lang" />
- </component>
- <component name="ProjectDictionaryState">
- <dictionary name="dragos" />
- <dictionary name="odersky" />
- </component>
- <component name="ProjectKey">
- <option name="state" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk/scala-lang.ipr" />
- </component>
- <component name="ProjectModuleManager">
- <modules>
- <module fileurl="file://$PROJECT_DIR$/actors.iml" filepath="$PROJECT_DIR$/actors.iml" />
- <module fileurl="file://$PROJECT_DIR$/compiler.iml" filepath="$PROJECT_DIR$/compiler.iml" />
- <module fileurl="file://$PROJECT_DIR$/dbc.iml" filepath="$PROJECT_DIR$/dbc.iml" />
- <module fileurl="file://$PROJECT_DIR$/library.iml" filepath="$PROJECT_DIR$/library.iml" />
- <module fileurl="file://$PROJECT_DIR$/manual.iml" filepath="$PROJECT_DIR$/manual.iml" />
- <module fileurl="file://$PROJECT_DIR$/partest.iml" filepath="$PROJECT_DIR$/partest.iml" />
- <module fileurl="file://$PROJECT_DIR$/scalap.iml" filepath="$PROJECT_DIR$/scalap.iml" />
- <module fileurl="file://$PROJECT_DIR$/swing.iml" filepath="$PROJECT_DIR$/swing.iml" />
- </modules>
- </component>
- <component name="ProjectRootManager" version="2" languageLevel="JDK_1_5" assert-keyword="true" jdk-15="true" project-jdk-name="1.6" project-jdk-type="JavaSDK">
- <output url="file://$PROJECT_DIR$/out" />
- </component>
- <component name="ResourceManagerContainer">
- <option name="myResourceBundles">
- <value>
- <list size="0" />
- </value>
- </option>
- </component>
- <component name="RmicSettings">
- <option name="IS_EANABLED" value="false" />
- <option name="DEBUGGING_INFO" value="true" />
- <option name="GENERATE_NO_WARNINGS" value="false" />
- <option name="GENERATE_IIOP_STUBS" value="false" />
- <option name="ADDITIONAL_OPTIONS_STRING" value="" />
- </component>
- <component name="ScalacSettings">
- <option name="MAXIMUM_HEAP_SIZE" value="1024" />
- <option name="DEPRECATION" value="false" />
- <option name="UNCHECKED" value="false" />
- </component>
- <component name="SvnBranchConfigurationManager">
- <option name="myConfigurationMap">
- <map>
- <entry key="$PROJECT_DIR$">
- <value>
- <SvnBranchConfiguration>
- <option name="branchUrls">
- <list>
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/branches" />
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/tags" />
- </list>
- </option>
- <option name="trunkUrl" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk" />
- </SvnBranchConfiguration>
- </value>
- </entry>
- <entry key="$PROJECT_DIR$/bin">
- <value>
- <SvnBranchConfiguration>
- <option name="branchUrls">
- <list>
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/branches" />
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/tags" />
- </list>
- </option>
- <option name="trunkUrl" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk" />
- </SvnBranchConfiguration>
- </value>
- </entry>
- <entry key="$PROJECT_DIR$/src">
- <value>
- <SvnBranchConfiguration>
- <option name="branchUrls">
- <list>
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/branches" />
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/tags" />
- </list>
- </option>
- <option name="trunkUrl" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk" />
- </SvnBranchConfiguration>
- </value>
- </entry>
- <entry key="$PROJECT_DIR$/src/actors">
- <value>
- <SvnBranchConfiguration>
- <option name="branchUrls">
- <list>
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/branches" />
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/tags" />
- </list>
- </option>
- <option name="trunkUrl" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk" />
- </SvnBranchConfiguration>
- </value>
- </entry>
- <entry key="$PROJECT_DIR$/src/compiler">
- <value>
- <SvnBranchConfiguration>
- <option name="branchUrls">
- <list>
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/branches" />
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/tags" />
- </list>
- </option>
- <option name="trunkUrl" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk" />
- </SvnBranchConfiguration>
- </value>
- </entry>
- <entry key="$PROJECT_DIR$/src/library">
- <value>
- <SvnBranchConfiguration>
- <option name="branchUrls">
- <list>
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/branches" />
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/tags" />
- </list>
- </option>
- <option name="trunkUrl" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk" />
- </SvnBranchConfiguration>
- </value>
- </entry>
- <entry key="$PROJECT_DIR$/test/disabled/lib/annotations.jar">
- <value>
- <SvnBranchConfiguration />
- </value>
- </entry>
- <entry key="$PROJECT_DIR$/test/disabled/lib/enums.jar">
- <value>
- <SvnBranchConfiguration />
- </value>
- </entry>
- <entry key="$PROJECT_DIR$/test/disabled/lib/nest.jar">
- <value>
- <SvnBranchConfiguration />
- </value>
- </entry>
- <entry key="$PROJECT_DIR$/test/files">
- <value>
- <SvnBranchConfiguration>
- <option name="branchUrls">
- <list>
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/branches" />
- <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/tags" />
- </list>
- </option>
- <option name="trunkUrl" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk" />
- </SvnBranchConfiguration>
- </value>
- </entry>
- </map>
- </option>
- <option name="myVersion" value="124" />
- <option name="mySupportsUserInfoFilter" value="true" />
- </component>
- <component name="VcsDirectoryMappings">
- <mapping directory="" vcs="svn" />
- </component>
- <component name="WebServicesPlugin" addRequiredLibraries="true" />
- <component name="libraryTable">
- <library name="Project ant library">
- <CLASSES>
- <root url="jar://$PROJECT_DIR$/lib/ant/ant-contrib.jar!/" />
- <root url="jar://$PROJECT_DIR$/lib/ant/vizant.jar!/" />
- <root url="jar://$PROJECT_DIR$/lib/ant/maven-ant-tasks-2.0.9.jar!/" />
- <root url="jar://$PROJECT_DIR$/lib/ant/ant-dotnet-1.0.jar!/" />
- </CLASSES>
- <JAVADOC />
- <SOURCES />
- </library>
- <library name="Project Scala SDK">
- <CLASSES>
- <root url="jar://$PROJECT_DIR$/lib/jline.jar!/" />
- <root url="jar://$PROJECT_DIR$/lib/fjbg.jar!/" />
- <root url="jar://$PROJECT_DIR$/lib/ScalaCheck.jar!/" />
- <root url="jar://$PROJECT_DIR$/lib/msil.jar!/" />
- </CLASSES>
- <JAVADOC />
- <SOURCES />
- </library>
- <library name="ant">
- <CLASSES>
- <root url="jar:///usr/share/ant/lib/ant.jar!/" />
- </CLASSES>
- <JAVADOC />
- <SOURCES />
- </library>
- <library name="locker">
- <CLASSES>
- <root url="file://$PROJECT_DIR$/build/locker/classes/library" />
- <root url="file://$PROJECT_DIR$/build/locker/classes/compiler" />
- </CLASSES>
- <JAVADOC />
- <SOURCES />
- </library>
- </component>
-</project>
-
diff --git a/src/actors/scala/actors/AbstractActor.scala b/src/actors/scala/actors/AbstractActor.scala
index 9c72b307fa..9cc62a1cde 100644
--- a/src/actors/scala/actors/AbstractActor.scala
+++ b/src/actors/scala/actors/AbstractActor.scala
@@ -13,12 +13,11 @@ package scala.actors
/**
* The <code>AbstractActor</code> trait.
*
- * @version 0.9.18
* @author Philipp Haller
*/
-trait AbstractActor extends OutputChannel[Any] with Replyable[Any, Any] {
+trait AbstractActor extends OutputChannel[Any] with CanReply[Any, Any] {
- private[actors] var exiting = false
+ private[actors] def exiting: Boolean = false
private[actors] def linkTo(to: AbstractActor): Unit
diff --git a/src/actors/scala/actors/AbstractReactor.scala b/src/actors/scala/actors/AbstractReactor.scala
index 5c4b082cff..d9dbd39c2b 100644
--- a/src/actors/scala/actors/AbstractReactor.scala
+++ b/src/actors/scala/actors/AbstractReactor.scala
@@ -11,7 +11,6 @@
/**
* The <code>AbstractReactor</code> trait.
*
- * @version 0.9.18
* @author Philipp Haller
*/
package scala.actors
diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala
index 838d3a8f63..491c1eb075 100644
--- a/src/actors/scala/actors/Actor.scala
+++ b/src/actors/scala/actors/Actor.scala
@@ -10,7 +10,7 @@
package scala.actors
-import scala.util.control.ControlException
+import scala.util.control.ControlThrowable
import java.util.{Timer, TimerTask}
import java.util.concurrent.{ExecutionException, Callable}
@@ -22,14 +22,42 @@ import java.util.concurrent.{ExecutionException, Callable}
*
* @author Philipp Haller
*/
-object Actor {
+object Actor extends Combinators {
+
+ /** An actor state. An actor can be in one of the following states:
+ * <ul>
+ * <li>New<br>
+ * An actor that has not yet started is in this state.</li>
+ * <li>Runnable<br>
+ * An actor executing is in this state.</li>
+ * <li>Suspended<br>
+ * An actor that is suspended waiting in a react is in this state.</li>
+ * <li>TimedSuspended<br>
+ * An actor that is suspended waiting in a reactWithin is in this state.</li>
+ * <li>Blocked<br>
+ * An actor that is blocked waiting in a receive is in this state.</li>
+ * <li>TimedBlocked<br>
+ * An actor that is blocked waiting in a receiveWithin is in this state.</li>
+ * <li>Terminated<br>
+ * An actor that has terminated is in this state.</li>
+ * </ul>
+ */
+ object State extends Enumeration {
+ val New,
+ Runnable,
+ Suspended,
+ TimedSuspended,
+ Blocked,
+ TimedBlocked,
+ Terminated = Value
+ }
- private[actors] val tl = new ThreadLocal[Reactor]
+ private[actors] val tl = new ThreadLocal[ReplyReactor]
// timer thread runs as daemon
private[actors] val timer = new Timer(true)
- private[actors] val suspendException = new SuspendActorException
+ private[actors] val suspendException = new SuspendActorControl
/**
* Returns the currently executing actor. Should be used instead
@@ -43,9 +71,10 @@ object Actor {
private[actors] def self(sched: IScheduler): Actor =
rawSelf(sched).asInstanceOf[Actor]
- private[actors] def rawSelf: Reactor = rawSelf(Scheduler)
+ private[actors] def rawSelf: ReplyReactor =
+ rawSelf(Scheduler)
- private[actors] def rawSelf(sched: IScheduler): Reactor = {
+ private[actors] def rawSelf(sched: IScheduler): ReplyReactor = {
val s = tl.get
if (s eq null) {
val r = new ActorProxy(currentThread, sched)
@@ -208,7 +237,7 @@ object Actor {
def eventloop(f: PartialFunction[Any, Unit]): Nothing =
rawSelf.react(new RecursiveProxyHandler(rawSelf, f))
- private class RecursiveProxyHandler(a: Reactor, f: PartialFunction[Any, Unit])
+ private class RecursiveProxyHandler(a: ReplyReactor, f: PartialFunction[Any, Unit])
extends PartialFunction[Any, Unit] {
def isDefinedAt(m: Any): Boolean =
true // events are immediately removed from the mailbox
@@ -222,21 +251,21 @@ object Actor {
* Returns the actor which sent the last received message.
*/
def sender: OutputChannel[Any] =
- rawSelf.asInstanceOf[ReplyReactor].sender
+ rawSelf.sender
/**
* Send <code>msg</code> to the actor waiting in a call to
* <code>!?</code>.
*/
def reply(msg: Any): Unit =
- rawSelf.asInstanceOf[ReplyReactor].reply(msg)
+ rawSelf.reply(msg)
/**
* Send <code>()</code> to the actor waiting in a call to
* <code>!?</code>.
*/
def reply(): Unit =
- rawSelf.asInstanceOf[ReplyReactor].reply(())
+ rawSelf.reply(())
/**
* Returns the number of messages in <code>self</code>'s mailbox
@@ -276,26 +305,6 @@ object Actor {
}
/**
- * Causes <code>self</code> to repeatedly execute
- * <code>body</code>.
- *
- * @param body the code block to be executed
- */
- def loop(body: => Unit): Unit = body andThen loop(body)
-
- /**
- * Causes <code>self</code> to repeatedly execute
- * <code>body</code> while the condition
- * <code>cond</code> is <code>true</code>.
- *
- * @param cond the condition to test
- * @param body the code block to be executed
- */
- def loopWhile(cond: => Boolean)(body: => Unit): Unit =
- if (cond) { body andThen loopWhile(cond)(body) }
- else continue
-
- /**
* Links <code>self</code> to actor <code>to</code>.
*
* @param to the actor to link to
@@ -348,9 +357,8 @@ object Actor {
* <code>Exit(self, 'normal)</code> to <code>a</code>.
* </p>
*/
- def exit(): Nothing = self.exit()
+ def exit(): Nothing = rawSelf.exit()
- def continue: Unit = throw new KillActorException
}
/**
@@ -376,7 +384,7 @@ object Actor {
* @author Philipp Haller
*/
@serializable @SerialVersionUID(-781154067877019505L)
-trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
+trait Actor extends AbstractActor with ReplyReactor with ActorCanReply {
/* The following two fields are only used when the actor
* suspends by blocking its underlying thread, for example,
@@ -392,12 +400,6 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
@volatile
private var received: Option[Any] = None
- /* This option holds a TimerTask when the actor waits in a
- * reactWithin/receiveWithin. The TimerTask is cancelled when
- * the actor can continue.
- */
- private var onTimeout: Option[TimerTask] = None
-
protected[actors] override def scheduler: IScheduler = Scheduler
private[actors] override def startSearch(msg: Any, replyTo: OutputChannel[Any], handler: PartialFunction[Any, Any]) =
@@ -408,19 +410,8 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
}
} else super.startSearch(msg, replyTo, handler)
- private[actors] override def makeReaction(fun: () => Unit): Runnable =
- new ActorTask(this, fun)
-
- private[actors] override def resumeReceiver(item: (Any, OutputChannel[Any]), handler: PartialFunction[Any, Any], onSameThread: Boolean) {
- synchronized {
- if (!onTimeout.isEmpty) {
- onTimeout.get.cancel()
- onTimeout = None
- }
- }
- senders = List(item._2)
- super.resumeReceiver(item, handler, onSameThread)
- }
+ private[actors] override def makeReaction(fun: () => Unit, handler: PartialFunction[Any, Any], msg: Any): Runnable =
+ new ActorTask(this, fun, handler, msg)
/**
* Receives a message from this actor's mailbox.
@@ -551,89 +542,18 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
result
}
- /**
- * Receives a message from this actor's mailbox.
- * <p>
- * This method never returns. Therefore, the rest of the computation
- * has to be contained in the actions of the partial function.
- *
- * @param f a partial function with message patterns and actions
- */
- override def react(f: PartialFunction[Any, Unit]): Nothing = {
- assert(Actor.self(scheduler) == this, "react on channel belonging to other actor")
+ override def react(handler: PartialFunction[Any, Unit]): Nothing = {
synchronized {
- if (shouldExit) exit() // links
- drainSendBuffer(mailbox)
+ if (shouldExit) exit()
}
- searchMailbox(mailbox, f, false)
- throw Actor.suspendException
+ super.react(handler)
}
- /**
- * Receives a message from this actor's mailbox within a certain
- * time span.
- * <p>
- * This method never returns. Therefore, the rest of the computation
- * has to be contained in the actions of the partial function.
- *
- * @param msec the time span before timeout
- * @param f a partial function with message patterns and actions
- */
- def reactWithin(msec: Long)(f: PartialFunction[Any, Unit]): Nothing = {
- assert(Actor.self(scheduler) == this, "react on channel belonging to other actor")
-
+ override def reactWithin(msec: Long)(handler: PartialFunction[Any, Unit]): Nothing = {
synchronized {
- if (shouldExit) exit() // links
- drainSendBuffer(mailbox)
- }
-
- // first, remove spurious TIMEOUT message from mailbox if any
- mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => m == TIMEOUT)
-
- val receiveTimeout = () => {
- if (f.isDefinedAt(TIMEOUT)) {
- senders = List(this)
- scheduleActor(f, TIMEOUT)
- } else
- error("unhandled timeout")
+ if (shouldExit) exit()
}
-
- var done = false
- while (!done) {
- val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
- senders = List(replyTo)
- f.isDefinedAt(m)
- })
- if (null eq qel) {
- val todo = synchronized {
- // in mean time new stuff might have arrived
- if (!sendBuffer.isEmpty) {
- drainSendBuffer(mailbox)
- // keep going
- () => {}
- } else if (msec == 0L) {
- done = true
- receiveTimeout
- } else {
- waitingFor = f
- val thisActor = this
- onTimeout = Some(new TimerTask {
- def run() { thisActor.send(TIMEOUT, thisActor) }
- })
- Actor.timer.schedule(onTimeout.get, msec)
- done = true
- () => {}
- }
- }
- todo()
- } else {
- senders = List(qel.session)
- scheduleActor(f, qel.msg)
- done = true
- }
- }
-
- throw Actor.suspendException
+ super.reactWithin(msec)(handler)
}
/**
@@ -644,13 +564,13 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
}
// guarded by lock of this
- // never throws SuspendActorException
+ // never throws SuspendActorControl
private[actors] override def scheduleActor(f: PartialFunction[Any, Any], msg: Any) =
if (f eq null) {
// do nothing (timeout is handled instead)
}
else {
- val task = new Reaction(this, f, msg)
+ val task = new ActorTask(this, null, f, msg)
scheduler executeFromActor task
}
@@ -681,26 +601,44 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
notify()
}
+ private[actors] override def exiting = synchronized {
+ _state == Actor.State.Terminated
+ }
+
/**
* Starts this actor.
*/
override def start(): Actor = synchronized {
- // Reset various flags.
- //
- // Note that we do *not* reset `trapExit`. The reason is that
- // users should be able to set the field in the constructor
- // and before `act` is called.
+ if (_state == Actor.State.New) {
+ _state = Actor.State.Runnable
+
+ // Reset various flags.
+ //
+ // Note that we do *not* reset `trapExit`. The reason is that
+ // users should be able to set the field in the constructor
+ // and before `act` is called.
+ exitReason = 'normal
+ shouldExit = false
- exitReason = 'normal
- exiting = false
- shouldExit = false
+ scheduler newActor this
+ scheduler execute (new Reaction(this))
- scheduler.newActor(this)
- scheduler.execute(new Reaction(this))
+ this
+ } else
+ this
+ }
- this
+ override def getState: Actor.State.Value = synchronized {
+ if (isSuspended) {
+ if (onTimeout.isEmpty)
+ Actor.State.Blocked
+ else
+ Actor.State.TimedBlocked
+ } else
+ super.getState
}
+ // guarded by this
private[actors] var links: List[AbstractActor] = Nil
/**
@@ -750,8 +688,11 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
links = links.filterNot(from.==)
}
+ @volatile
var trapExit = false
- private[actors] var exitReason: AnyRef = 'normal
+ // guarded by this
+ private var exitReason: AnyRef = 'normal
+ // guarded by this
private[actors] var shouldExit = false
/**
@@ -771,7 +712,7 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
* <code>reason != 'normal</code>.
* </p>
*/
- protected[actors] def exit(reason: AnyRef): Nothing = {
+ protected[actors] def exit(reason: AnyRef): Nothing = synchronized {
exitReason = reason
exit()
}
@@ -779,17 +720,16 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
/**
* Terminates with exit reason <code>'normal</code>.
*/
- protected[actors] override def exit(): Nothing = {
- // links
+ protected[actors] override def exit(): Nothing = synchronized {
if (!links.isEmpty)
exitLinked()
- terminated()
- throw Actor.suspendException
+ super.exit()
}
// Assume !links.isEmpty
+ // guarded by this
private[actors] def exitLinked() {
- exiting = true
+ _state = Actor.State.Terminated
// remove this from links
val mylinks = links.filterNot(this.==)
// exit linked processes
@@ -801,6 +741,7 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
}
// Assume !links.isEmpty
+ // guarded by this
private[actors] def exitLinked(reason: AnyRef) {
exitReason = reason
exitLinked()
@@ -820,14 +761,14 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
// (because shouldExit == true)
if (isSuspended)
resumeActor()
- else if (waitingFor ne waitingForNone) {
+ else if (waitingFor ne Reactor.waitingForNone) {
scheduleActor(waitingFor, null)
- /* Here we should not throw a SuspendActorException,
+ /* Here we should not throw a SuspendActorControl,
since the current method is called from an actor that
is in the process of exiting.
Therefore, the contract for scheduleActor is that
- it never throws a SuspendActorException.
+ it never throws a SuspendActorControl.
*/
}
}
@@ -857,12 +798,18 @@ trait Actor extends AbstractActor with ReplyReactor with ReplyableActor {
* <b>case</b> TIMEOUT <b>=&gt;</b> ...
* }</pre>
*
- * @version 0.9.8
* @author Philipp Haller
*/
case object TIMEOUT
+/** An `Exit` message (an instance of this class) is sent to an actor
+ * with `trapExit` set to `true` whenever one of its linked actors
+ * terminates.
+ *
+ * @param from the actor that terminated
+ * @param reason the reason that caused the actor to terminate
+ */
case class Exit(from: AbstractActor, reason: AnyRef)
/** <p>
@@ -870,7 +817,6 @@ case class Exit(from: AbstractActor, reason: AnyRef)
* executions.
* </p>
*
- * @version 0.9.8
* @author Philipp Haller
*/
-private[actors] class SuspendActorException extends Throwable with ControlException
+private[actors] class SuspendActorControl extends ControlThrowable
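
The linking, trapExit and Exit semantics documented above, together with the new Actor.State values and getState, can be exercised as in the following sketch. This is an illustration only (the object name and symbols are made up here), written against the 2.8-era scala.actors API that this patch modifies; it is not part of the patch itself.

import scala.actors.{Actor, Exit}
import scala.actors.Actor._

object LinkExitSketch {
  def main(args: Array[String]) {
    // A worker that handles one message; its body then completes,
    // so it terminates with exit reason 'normal.
    val worker = actor {
      react {
        case 'work => println("working")
      }
    }

    actor {
      self.trapExit = true          // receive Exit messages instead of exiting with the link
      link(worker)
      println(worker.getState)      // typically Suspended: waiting in react
      worker ! 'work
      react {
        case Exit(from, reason) =>
          // delivered because trapExit is set; reason is 'normal here
          println("linked actor exited with reason " + reason)
          println(worker.getState)  // Terminated
      }
    }
  }
}
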
diff --git a/src/actors/scala/actors/ReplyableActor.scala b/src/actors/scala/actors/ActorCanReply.scala
index 2122dd854b..8cba425b4c 100644
--- a/src/actors/scala/actors/ReplyableActor.scala
+++ b/src/actors/scala/actors/ActorCanReply.scala
@@ -13,13 +13,12 @@ package scala.actors
import java.util.concurrent.ExecutionException
/**
- * The ReplyableActor trait provides
- * message send operations that may result in a
- * response from the receiver.
+ * The ActorCanReply trait provides message send operations that
+ * may result in a response from the receiver.
*
* @author Philipp Haller
*/
-private[actors] trait ReplyableActor extends ReplyableReactor {
+private[actors] trait ActorCanReply extends ReactorCanReply {
thiz: AbstractActor with ReplyReactor =>
/**
@@ -59,18 +58,18 @@ private[actors] trait ReplyableActor extends ReplyableReactor {
* Sends <code>msg</code> to this actor and immediately
* returns a future representing the reply value.
* The reply is post-processed using the partial function
- * <code>f</code>. This also allows to recover a more
+ * <code>handler</code>. This also allows recovering a more
* precise type for the reply value.
*/
- override def !![A](msg: Any, f: PartialFunction[Any, A]): Future[A] = {
+ override def !![A](msg: Any, handler: PartialFunction[Any, A]): Future[A] = {
val ftch = new Channel[A](Actor.self(thiz.scheduler))
thiz.send(msg, new OutputChannel[Any] {
def !(msg: Any) =
- ftch ! f(msg)
+ ftch ! handler(msg)
def send(msg: Any, replyTo: OutputChannel[Any]) =
- ftch.send(f(msg), replyTo)
+ ftch.send(handler(msg), replyTo)
def forward(msg: Any) =
- ftch.forward(f(msg))
+ ftch.forward(handler(msg))
def receiver =
ftch.receiver
})
@@ -84,19 +83,25 @@ private[actors] trait ReplyableActor extends ReplyableReactor {
override def !!(msg: Any): Future[Any] = {
val ftch = new Channel[Any](Actor.self(thiz.scheduler))
val linkedChannel = new AbstractActor {
- type Future[+R] = scala.actors.Future[R]
- def !(msg: Any) =
+ def !(msg: Any) = {
ftch ! msg
- def send(msg: Any, replyTo: OutputChannel[Any]) =
+ thiz unlinkFrom this
+ }
+ def send(msg: Any, replyTo: OutputChannel[Any]) = {
ftch.send(msg, replyTo)
- def forward(msg: Any) =
+ thiz unlinkFrom this
+ }
+ def forward(msg: Any) = {
ftch.forward(msg)
+ thiz unlinkFrom this
+ }
def receiver =
ftch.receiver
def linkTo(to: AbstractActor) { /* do nothing */ }
def unlinkFrom(from: AbstractActor) { /* do nothing */ }
def exit(from: AbstractActor, reason: AnyRef) {
ftch.send(Exit(from, reason), thiz)
+ thiz unlinkFrom this
}
// should never be invoked; return dummy value
def !?(msg: Any) = msg
@@ -113,7 +118,7 @@ private[actors] trait ReplyableActor extends ReplyableReactor {
Futures.fromInputChannel(someChan)
}
}
- thiz.linkTo(linkedChannel)
+ thiz linkTo linkedChannel
thiz.send(msg, linkedChannel)
new Future[Any](ftch) {
var exitReason: Option[Any] = None
@@ -135,13 +140,13 @@ private[actors] trait ReplyableActor extends ReplyableReactor {
else
throw new ExecutionException(new Exception(reason.toString()))
}
- } else inputChannel.receive(handleReply andThen {(x: Unit) => apply()})
+ } else inputChannel.receive(handleReply andThen { _ => apply() })
def respond(k: Any => Unit): Unit =
if (isSet)
apply()
else
- inputChannel.react(handleReply andThen {(x: Unit) => k(apply())})
+ inputChannel.react(handleReply andThen { _ => k(apply()) })
def isSet = (fvalue match {
case None =>
@@ -150,7 +155,7 @@ private[actors] trait ReplyableActor extends ReplyableReactor {
false
}
val whatToDo =
- handleTimeout orElse (handleReply andThen {(x: Unit) => true})
+ handleTimeout orElse (handleReply andThen { _ => true })
inputChannel.receiveWithin(0)(whatToDo)
case Some(_) => true
}) || !exitReason.isEmpty
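
A usage sketch of the !! variant with a post-processing handler, as implemented above; the sizer actor is illustrative and only the public API is assumed:

    import scala.actors.Actor._

    // An actor that replies with the length of any String it receives.
    val sizer = actor {
      loop {
        react {
          case s: String => reply(s.length)
        }
      }
    }

    // The handler recovers a precise type (Int) from the otherwise untyped reply.
    val handler: PartialFunction[Any, Int] = { case n: Int => n }
    val fut = sizer !! ("hello", handler)
    println(fut())   // blocks until the reply arrives, then prints 5
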
diff --git a/src/actors/scala/actors/ActorProxy.scala b/src/actors/scala/actors/ActorProxy.scala
index 6364db87fe..9a79ff14c5 100644
--- a/src/actors/scala/actors/ActorProxy.scala
+++ b/src/actors/scala/actors/ActorProxy.scala
@@ -16,7 +16,6 @@ import java.lang.Thread
* The class <code>ActorProxy</code> provides a dynamic actor proxy for normal
* Java threads.
*
- * @version 0.9.8
* @author Philipp Haller
*/
private[actors] class ActorProxy(t: Thread, override final val scheduler: IScheduler) extends Actor {
diff --git a/src/actors/scala/actors/ActorTask.scala b/src/actors/scala/actors/ActorTask.scala
index 8d8504245f..bceea06072 100644
--- a/src/actors/scala/actors/ActorTask.scala
+++ b/src/actors/scala/actors/ActorTask.scala
@@ -17,14 +17,19 @@ package scala.actors
*
* @author Philipp Haller
*/
-private[actors] class ActorTask(actor: Actor, fun: () => Unit) extends ReactorTask[Actor](actor, fun) {
+private[actors] class ActorTask(actor: Actor,
+ fun: () => Unit,
+ handler: PartialFunction[Any, Any],
+ msg: Any)
+ extends ReplyReactorTask(actor, fun, handler, msg) {
- protected override def beforeExecuting() {
+ protected override def beginExecution() {
+ super.beginExecution()
if (actor.shouldExit)
actor.exit()
}
- protected override def afterExecuting(e: Exception) {
+ protected override def terminateExecution(e: Exception) {
actor.synchronized {
if (!actor.links.isEmpty)
actor.exitLinked(e)
diff --git a/src/actors/scala/actors/Replyable.scala b/src/actors/scala/actors/CanReply.scala
index 2c7e55e06a..23f0a5319b 100644
--- a/src/actors/scala/actors/Replyable.scala
+++ b/src/actors/scala/actors/CanReply.scala
@@ -11,16 +11,15 @@
package scala.actors
/**
- * The Replyable trait defines result-bearing message send operations
- * on replyable actors.
+ * The <code>CanReply</code> trait defines result-bearing message send operations.
*
* @author Philipp Haller
*/
-trait Replyable[-T, +R] {
+trait CanReply[-T, +R] {
/**
- * Sends <code>msg</code> to this Replyable and awaits reply
- * (synchronous).
+ * Sends <code>msg</code> to this <code>CanReply</code> and
+ * awaits reply (synchronous).
*
* @param msg the message to be sent
* @return the reply
@@ -28,8 +27,9 @@ trait Replyable[-T, +R] {
def !?(msg: T): R
/**
- * Sends <code>msg</code> to this Replyable and awaits reply
- * (synchronous) within <code>msec</code> milliseconds.
+ * Sends <code>msg</code> to this <code>CanReply</code> and
+ * awaits reply (synchronous) within <code>msec</code>
+ * milliseconds.
*
* @param msec the time span before timeout
* @param msg the message to be sent
@@ -39,8 +39,8 @@ trait Replyable[-T, +R] {
def !?(msec: Long, msg: T): Option[R]
/**
- * Sends <code>msg</code> to this actor and immediately
- * returns a future representing the reply value.
+ * Sends <code>msg</code> to this <code>CanReply</code> and
+ * immediately returns a future representing the reply value.
*
* @param msg the message to be sent
* @return the future
@@ -49,17 +49,17 @@ trait Replyable[-T, +R] {
() => this !? msg
/**
- * Sends <code>msg</code> to this actor and immediately
- * returns a future representing the reply value.
+ * Sends <code>msg</code> to this <code>CanReply</code> and
+ * immediately returns a future representing the reply value.
* The reply is post-processed using the partial function
- * <code>f</code>. This also allows to recover a more
+ * <code>handler</code>. This also allows recovering a more
* precise type for the reply value.
*
- * @param msg the message to be sent
- * @param f the function to be applied to the response
- * @return the future
+ * @param msg the message to be sent
+ * @param handler the function to be applied to the response
+ * @return the future
*/
- def !![P](msg: T, f: PartialFunction[R, P]): () => P =
- () => f(this !? msg)
+ def !![P](msg: T, handler: PartialFunction[R, P]): () => P =
+ () => handler(this !? msg)
}
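
The operations CanReply specifies, sketched against a simple echo actor (illustrative); the timed variant yields None if no reply arrives in time:

    import scala.actors.Actor._

    val echo = actor {
      loop {
        react { case msg => reply(msg) }
      }
    }

    val r1 = echo !? "ping"            // synchronous send, blocks for the reply
    val r2 = echo !? (500, "ping")     // waits at most 500 ms, yields Option[Any]
    println(r1)                        // ping
    println(r2)                        // Some(ping)
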
diff --git a/src/actors/scala/actors/Channel.scala b/src/actors/scala/actors/Channel.scala
index 24340d22f2..4c37de7665 100644
--- a/src/actors/scala/actors/Channel.scala
+++ b/src/actors/scala/actors/Channel.scala
@@ -25,7 +25,6 @@ package scala.actors
* }
* </pre>
*
- * @version 0.9.8
* @author Philipp Haller
*/
case class ! [a](ch: Channel[a], msg: a)
@@ -35,7 +34,6 @@ case class ! [a](ch: Channel[a], msg: a)
* actors. Only the actor creating an instance of a
* <code>Channel</code> may receive from it.
*
- * @version 0.9.17
* @author Philipp Haller
*/
class Channel[Msg](val receiver: Actor) extends InputChannel[Msg] with OutputChannel[Msg] {
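
A sketch of scoped communication over a Channel; only the actor that created it may receive from it, while any actor may send to it. The nested actors are illustrative:

    import scala.actors.{Actor, Channel}
    import scala.actors.Actor._

    actor {
      val ch = new Channel[Int](self)   // receiving is restricted to this actor
      actor { ch ! 42 }                 // sending is not
      ch.receive {
        case n => println("got " + n)
      }
    }
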
diff --git a/src/actors/scala/actors/Combinators.scala b/src/actors/scala/actors/Combinators.scala
new file mode 100644
index 0000000000..3c0be7ed15
--- /dev/null
+++ b/src/actors/scala/actors/Combinators.scala
@@ -0,0 +1,39 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+// $Id$
+
+package scala.actors
+
+private[actors] trait Combinators {
+
+ implicit def mkBody[a](body: => a): Actor.Body[a]
+
+ /**
+ * Causes <code>self</code> to repeatedly execute
+ * <code>body</code>.
+ *
+ * @param body the code block to be executed
+ */
+ def loop(body: => Unit): Unit = body andThen loop(body)
+
+ /**
+ * Causes <code>self</code> to repeatedly execute
+ * <code>body</code> while the condition
+ * <code>cond</code> is <code>true</code>.
+ *
+ * @param cond the condition to test
+ * @param body the code block to be executed
+ */
+ def loopWhile(cond: => Boolean)(body: => Unit): Unit =
+ if (cond) { body andThen loopWhile(cond)(body) }
+ else continue
+
+ def continue: Unit = throw new KillActorControl
+
+}
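
A usage sketch of the combinators above; loopWhile re-schedules its body as a continuation, so react can be used inside it without tying up a thread between iterations. The counter logic is illustrative:

    import scala.actors.Actor._

    val counter = actor {
      var remaining = 3
      loopWhile(remaining > 0) {
        react {
          case "tick" =>
            remaining -= 1
            println("ticks left: " + remaining)
        }
      }
    }

    counter ! "tick"; counter ! "tick"; counter ! "tick"
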
diff --git a/src/actors/scala/actors/Future.scala b/src/actors/scala/actors/Future.scala
index ebb0489d88..9f65786879 100644
--- a/src/actors/scala/actors/Future.scala
+++ b/src/actors/scala/actors/Future.scala
@@ -28,6 +28,9 @@ abstract class Future[+T](val inputChannel: InputChannel[T]) extends Responder[T
private[actors] def fvalueTyped = fvalue.get.asInstanceOf[T]
@deprecated("this member is going to be removed in a future release")
+ val ch: InputChannel[Any] = inputChannel
+
+ @deprecated("this member is going to be removed in a future release")
protected def value: Option[Any] = fvalue
@deprecated("this member is going to be removed in a future release")
protected def value_=(x: Option[Any]) { fvalue = x }
@@ -46,11 +49,15 @@ abstract class Future[+T](val inputChannel: InputChannel[T]) extends Responder[T
*/
object Futures {
+ import scala.concurrent.SyncVar
+
private case object Eval
- private class FutureActor[T](fun: () => T, channel: Channel[T])
+ private class FutureActor[T](fun: SyncVar[T] => Unit, channel: Channel[T])
extends Future[T](channel) with DaemonActor {
+ import Actor._
+
def isSet = !fvalue.isEmpty
def apply(): T = {
@@ -70,12 +77,17 @@ object Futures {
}
def act() {
- val res = fun()
- fvalue = Some(res)
- channel ! res
- Actor.loop {
- Actor.react {
- case Eval => Actor.reply()
+ val res = new SyncVar[T]
+
+ {
+ fun(res)
+ } andThen {
+ fvalue = Some(res.get)
+ channel ! res.get
+ loop {
+ react {
+ case Eval => reply()
+ }
}
}
}
@@ -90,7 +102,7 @@ object Futures {
*/
def future[T](body: => T): Future[T] = {
val c = new Channel[T](Actor.self(DaemonScheduler))
- val a = new FutureActor[T](() => body, c)
+ val a = new FutureActor[T](_.set(body), c)
a.start()
a
}
@@ -100,10 +112,16 @@ object Futures {
* @param timespan the time span in ms after which the future resolves
* @return the future
*/
- def alarm(timespan: Long) = future {
- Actor.reactWithin(timespan) {
- case TIMEOUT => {}
+ def alarm(timespan: Long): Future[Unit] = {
+ val c = new Channel[Unit](Actor.self(DaemonScheduler))
+ val fun = (res: SyncVar[Unit]) => {
+ Actor.reactWithin(timespan) {
+ case TIMEOUT => res.set({})
+ }
}
+ val a = new FutureActor[Unit](fun, c)
+ a.start()
+ a
}
/** Waits for the first result returned by one of two
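
A usage sketch of the two entry points reworked above; future evaluates its body inside a daemon actor, and alarm resolves only after the given delay:

    import scala.actors.Futures

    val f = Futures.future { (1 to 100).sum }   // runs in a daemon actor
    println(f())                                // blocks until available: 5050

    val a = Futures.alarm(200)                  // Future[Unit], set after roughly 200 ms
    a()                                         // returns once the alarm has fired
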
diff --git a/src/actors/scala/actors/IScheduler.scala b/src/actors/scala/actors/IScheduler.scala
index 7a47c670e7..1718dab045 100644
--- a/src/actors/scala/actors/IScheduler.scala
+++ b/src/actors/scala/actors/IScheduler.scala
@@ -49,14 +49,14 @@ trait IScheduler {
*
* @param a the actor to be registered
*/
- def newActor(a: Reactor): Unit
+ def newActor(a: TrackedReactor): Unit
/** Unregisters an actor from this scheduler, because it
* has terminated.
*
* @param a the actor to be registered
*/
- def terminated(a: Reactor): Unit
+ def terminated(a: TrackedReactor): Unit
/** Registers a closure to be executed when the specified
* actor terminates.
@@ -64,7 +64,7 @@ trait IScheduler {
* @param a the actor
* @param f the closure to be registered
*/
- def onTerminate(a: Reactor)(f: => Unit): Unit
+ def onTerminate(a: TrackedReactor)(f: => Unit): Unit
def managedBlock(blocker: scala.concurrent.ManagedBlocker): Unit
diff --git a/src/actors/scala/actors/InputChannel.scala b/src/actors/scala/actors/InputChannel.scala
index 46988159fa..fa2fad43c6 100644
--- a/src/actors/scala/actors/InputChannel.scala
+++ b/src/actors/scala/actors/InputChannel.scala
@@ -14,7 +14,6 @@ package scala.actors
* The <code>InputChannel</code> trait provides a common interface
* for all channels from which values can be received.
*
- * @version 0.9.8
* @author Philipp Haller
*/
trait InputChannel[+Msg] {
diff --git a/src/actors/scala/actors/MessageQueue.scala b/src/actors/scala/actors/MessageQueue.scala
index 000ff1bfc6..2c1c2446e6 100644
--- a/src/actors/scala/actors/MessageQueue.scala
+++ b/src/actors/scala/actors/MessageQueue.scala
@@ -18,14 +18,14 @@ package scala.actors
*/
@serializable @SerialVersionUID(7124278808020037465L)
@deprecated("this class is going to be removed in a future release")
-class MessageQueueElement(msg: Any, session: OutputChannel[Any], next: MessageQueueElement) extends MQueueElement(msg, session, next) {
+class MessageQueueElement(msg: Any, session: OutputChannel[Any], next: MessageQueueElement) extends MQueueElement[Any](msg, session, next) {
def this() = this(null, null, null)
def this(msg: Any, session: OutputChannel[Any]) = this(msg, session, null)
}
-private[actors] class MQueueElement(val msg: Any, val session: OutputChannel[Any], var next: MQueueElement) {
+private[actors] class MQueueElement[Msg >: Null](val msg: Msg, val session: OutputChannel[Any], var next: MQueueElement[Msg]) {
def this() = this(null, null, null)
- def this(msg: Any, session: OutputChannel[Any]) = this(msg, session, null)
+ def this(msg: Msg, session: OutputChannel[Any]) = this(msg, session, null)
}
/**
@@ -38,11 +38,11 @@ private[actors] class MQueueElement(val msg: Any, val session: OutputChannel[Any
*/
@serializable @SerialVersionUID(2168935872884095767L)
@deprecated("this class is going to be removed in a future release")
-class MessageQueue(label: String) extends MQueue(label)
+class MessageQueue(label: String) extends MQueue[Any](label)
-private[actors] class MQueue(protected val label: String) {
- protected var first: MQueueElement = null
- protected var last: MQueueElement = null // last eq null iff list is empty
+private[actors] class MQueue[Msg >: Null](protected val label: String) {
+ protected var first: MQueueElement[Msg] = null
+ protected var last: MQueueElement[Msg] = null // last eq null iff list is empty
private var _size = 0
def size = _size
@@ -52,7 +52,7 @@ private[actors] class MQueue(protected val label: String) {
_size += diff
}
- def append(msg: Any, session: OutputChannel[Any]) {
+ def append(msg: Msg, session: OutputChannel[Any]) {
changeSize(1) // size always increases by 1
val el = new MQueueElement(msg, session)
@@ -62,7 +62,7 @@ private[actors] class MQueue(protected val label: String) {
last = el
}
- def append(el: MQueueElement) {
+ def append(el: MQueueElement[Msg]) {
changeSize(1) // size always increases by 1
if (isEmpty) first = el
@@ -71,7 +71,7 @@ private[actors] class MQueue(protected val label: String) {
last = el
}
- def foreach(f: (Any, OutputChannel[Any]) => Unit) {
+ def foreach(f: (Msg, OutputChannel[Any]) => Unit) {
var curr = first
while (curr != null) {
f(curr.msg, curr.session)
@@ -79,7 +79,7 @@ private[actors] class MQueue(protected val label: String) {
}
}
- def foreachAppend(target: MQueue) {
+ def foreachAppend(target: MQueue[Msg]) {
var curr = first
while (curr != null) {
target.append(curr)
@@ -87,7 +87,7 @@ private[actors] class MQueue(protected val label: String) {
}
}
- def foreachDequeue(target: MQueue) {
+ def foreachDequeue(target: MQueue[Msg]) {
var curr = first
while (curr != null) {
target.append(curr)
@@ -98,7 +98,7 @@ private[actors] class MQueue(protected val label: String) {
_size = 0
}
- def foldLeft[B](z: B)(f: (B, Any) => B): B = {
+ def foldLeft[B](z: B)(f: (B, Msg) => B): B = {
var acc = z
var curr = first
while (curr != null) {
@@ -111,10 +111,10 @@ private[actors] class MQueue(protected val label: String) {
/** Returns the n-th message that satisfies the predicate <code>p</code>
* without removing it.
*/
- def get(n: Int)(p: Any => Boolean): Option[Any] = {
+ def get(n: Int)(p: Msg => Boolean): Option[Msg] = {
var pos = 0
- def test(msg: Any): Boolean =
+ def test(msg: Msg): Boolean =
p(msg) && (pos == n || { pos += 1; false })
var curr = first
@@ -127,16 +127,16 @@ private[actors] class MQueue(protected val label: String) {
/** Removes the n-th message that satisfies the predicate <code>p</code>.
*/
- def remove(n: Int)(p: (Any, OutputChannel[Any]) => Boolean): Option[(Any, OutputChannel[Any])] =
+ def remove(n: Int)(p: (Msg, OutputChannel[Any]) => Boolean): Option[(Msg, OutputChannel[Any])] =
removeInternal(n)(p) map (x => (x.msg, x.session))
/** Extracts the first message that satisfies the predicate <code>p</code>
* or <code>null</code> if <code>p</code> fails for all of them.
*/
- def extractFirst(p: (Any, OutputChannel[Any]) => Boolean): MQueueElement =
+ def extractFirst(p: (Msg, OutputChannel[Any]) => Boolean): MQueueElement[Msg] =
removeInternal(0)(p) orNull
- def extractFirst(pf: PartialFunction[Any, Any]): MQueueElement = {
+ def extractFirst(pf: PartialFunction[Msg, Any]): MQueueElement[Msg] = {
if (isEmpty) // early return
return null
@@ -173,14 +173,14 @@ private[actors] class MQueue(protected val label: String) {
}
}
- private def removeInternal(n: Int)(p: (Any, OutputChannel[Any]) => Boolean): Option[MQueueElement] = {
+ private def removeInternal(n: Int)(p: (Msg, OutputChannel[Any]) => Boolean): Option[MQueueElement[Msg]] = {
var pos = 0
- def foundMsg(x: MQueueElement) = {
+ def foundMsg(x: MQueueElement[Msg]) = {
changeSize(-1)
Some(x)
}
- def test(msg: Any, session: OutputChannel[Any]): Boolean =
+ def test(msg: Msg, session: OutputChannel[Any]): Boolean =
p(msg, session) && (pos == n || { pos += 1 ; false })
if (isEmpty) // early return
@@ -220,7 +220,7 @@ private[actors] class MQueue(protected val label: String) {
/** Debugging trait.
*/
-private[actors] trait MessageQueueTracer extends MQueue
+private[actors] trait MessageQueueTracer extends MQueue[Any]
{
private val queueNumber = MessageQueueTracer.getQueueNumber
@@ -238,7 +238,7 @@ private[actors] trait MessageQueueTracer extends MQueue
printQueue("REMOVE %s" format res)
res
}
- override def extractFirst(p: (Any, OutputChannel[Any]) => Boolean): MQueueElement = {
+ override def extractFirst(p: (Any, OutputChannel[Any]) => Boolean): MQueueElement[Any] = {
val res = super.extractFirst(p)
printQueue("EXTRACT_FIRST %s" format res)
res
@@ -253,7 +253,7 @@ private[actors] trait MessageQueueTracer extends MQueue
override def toString() = "%s:%d".format(label, queueNumber)
}
-object MessageQueueTracer {
+private[actors] object MessageQueueTracer {
// for tracing purposes
private var queueNumberAssigner = 0
private def getQueueNumber = synchronized {
diff --git a/src/actors/scala/actors/OutputChannel.scala b/src/actors/scala/actors/OutputChannel.scala
index 4fdd606a40..514c445944 100644
--- a/src/actors/scala/actors/OutputChannel.scala
+++ b/src/actors/scala/actors/OutputChannel.scala
@@ -14,7 +14,6 @@ package scala.actors
* The <code>OutputChannel</code> trait provides a common interface
* for all channels to which values can be sent.
*
- * @version 0.9.17
* @author Philipp Haller
*/
trait OutputChannel[-Msg] extends AbstractReactor[Msg] {
@@ -42,7 +41,7 @@ trait OutputChannel[-Msg] extends AbstractReactor[Msg] {
def forward(msg: Msg): Unit
/**
- * Returns the <code>Reactor</code> that is
+ * Returns the <code>Actor</code> that is
* receiving from this <code>OutputChannel</code>.
*/
def receiver: Actor
diff --git a/src/actors/scala/actors/ReactChannel.scala b/src/actors/scala/actors/ReactChannel.scala
index 8bbbc04f53..dc31e99711 100644
--- a/src/actors/scala/actors/ReactChannel.scala
+++ b/src/actors/scala/actors/ReactChannel.scala
@@ -15,7 +15,7 @@ package scala.actors
*
* @author Philipp Haller
*/
-private[actors] class ReactChannel[Msg](receiver: Reactor) extends InputChannel[Msg] {
+private[actors] class ReactChannel[Msg](receiver: ReplyReactor) extends InputChannel[Msg] {
private case class SendToReactor(channel: ReactChannel[Msg], msg: Msg)
diff --git a/src/actors/scala/actors/Reaction.scala b/src/actors/scala/actors/Reaction.scala
index 753dd7da83..5a6d9b643c 100644
--- a/src/actors/scala/actors/Reaction.scala
+++ b/src/actors/scala/actors/Reaction.scala
@@ -11,10 +11,10 @@
package scala.actors
-import scala.util.control.ControlException
+import scala.util.control.ControlThrowable
import java.lang.{InterruptedException, Runnable}
-private[actors] class KillActorException extends Throwable with ControlException
+private[actors] class KillActorControl extends ControlThrowable
/** <p>
* The abstract class <code>Reaction</code> associates
@@ -23,15 +23,11 @@ private[actors] class KillActorException extends Throwable with ControlException
* <code>java.lang.Runnable</code></a>.
* </p>
*
- * @deprecated("this class is going to be removed in a future release")
* @author Philipp Haller
*/
-class Reaction(a: Actor, f: PartialFunction[Any, Any], msg: Any) extends ActorTask(a, () => {
- if (f == null)
- a.act()
- else
- f(msg)
-}) {
+@deprecated("This class will be removed in a future release")
+class Reaction(a: Actor, f: PartialFunction[Any, Any], msg: Any)
+extends ActorTask(a, if (f == null) (() => a.act()) else null, f, msg) {
def this(a: Actor) = this(a, null, null)
diff --git a/src/actors/scala/actors/Reactor.scala b/src/actors/scala/actors/Reactor.scala
index eb0485263b..db43921056 100644
--- a/src/actors/scala/actors/Reactor.scala
+++ b/src/actors/scala/actors/Reactor.scala
@@ -10,18 +10,40 @@
package scala.actors
-import scala.actors.scheduler.{DelegatingScheduler, DefaultThreadPoolScheduler}
-import scala.collection.mutable.Queue
+import scala.actors.scheduler.{DelegatingScheduler, ExecutorScheduler,
+ ForkJoinScheduler, ThreadPoolConfig}
+import java.util.concurrent.{ThreadPoolExecutor, TimeUnit, LinkedBlockingQueue}
+
+private[actors] object Reactor {
-private object Reactor {
val scheduler = new DelegatingScheduler {
def makeNewScheduler: IScheduler = {
- val s = new DefaultThreadPoolScheduler(false)
- Debug.info(this+": starting new "+s+" ["+s.getClass+"]")
- s.start()
- s
+ val sched = if (!ThreadPoolConfig.useForkJoin) {
+ // default is non-daemon
+ val workQueue = new LinkedBlockingQueue[Runnable]
+ ExecutorScheduler(
+ new ThreadPoolExecutor(ThreadPoolConfig.corePoolSize,
+ ThreadPoolConfig.maxPoolSize,
+ 60000L,
+ TimeUnit.MILLISECONDS,
+ workQueue,
+ new ThreadPoolExecutor.CallerRunsPolicy))
+ } else {
+ // default is non-daemon, non-fair
+ val s = new ForkJoinScheduler(ThreadPoolConfig.corePoolSize, ThreadPoolConfig.maxPoolSize, false, false)
+ s.start()
+ s
+ }
+ Debug.info(this+": starting new "+sched+" ["+sched.getClass+"]")
+ sched
}
}
+
+ val waitingForNone = new PartialFunction[Any, Unit] {
+ def isDefinedAt(x: Any) = false
+ def apply(x: Any) {}
+ }
+
}
/**
@@ -29,34 +51,35 @@ private object Reactor {
*
* @author Philipp Haller
*/
-trait Reactor extends OutputChannel[Any] {
+trait Reactor[Msg >: Null] extends OutputChannel[Msg] with Combinators {
/* The actor's mailbox. */
- private[actors] val mailbox = new MQueue("Reactor")
+ private[actors] val mailbox = new MQueue[Msg]("Reactor")
// guarded by this
- private[actors] val sendBuffer = new MQueue("SendBuffer")
+ private[actors] val sendBuffer = new MQueue[Msg]("SendBuffer")
- /* Whenever this actor executes on some thread, waitingFor is
- * guaranteed to be equal to waitingForNone.
+ /* Whenever this actor executes on some thread, `waitingFor` is
+ * guaranteed to be equal to `Reactor.waitingForNone`.
*
- * In other words, whenever waitingFor is not equal to
- * waitingForNone, this actor is guaranteed not to execute on some
- * thread.
+ * In other words, whenever `waitingFor` is not equal to
+ * `Reactor.waitingForNone`, this actor is guaranteed not to execute
+ * on some thread.
+ *
+ * If the actor waits in a `react`, `waitingFor` holds the
+ * message handler that `react` was called with.
+ *
+ * guarded by this
*/
- private[actors] val waitingForNone = new PartialFunction[Any, Unit] {
- def isDefinedAt(x: Any) = false
- def apply(x: Any) {}
- }
+ private[actors] var waitingFor: PartialFunction[Msg, Any] =
+ Reactor.waitingForNone
- /* If the actor waits in a react, waitingFor holds the
- * message handler that react was called with.
- */
- private[actors] var waitingFor: PartialFunction[Any, Any] = waitingForNone // guarded by lock of this
+ // guarded by this
+ private[actors] var _state: Actor.State.Value = Actor.State.New
/**
- * The behavior of an actor is specified by implementing this
- * abstract method.
+ * The behavior of a <code>Reactor</code> is specified by implementing
+ * this method.
*/
def act(): Unit
@@ -76,11 +99,11 @@ trait Reactor extends OutputChannel[Any] {
* @param msg the message to send
* @param replyTo the reply destination
*/
- def send(msg: Any, replyTo: OutputChannel[Any]) {
+ def send(msg: Msg, replyTo: OutputChannel[Any]) {
val todo = synchronized {
- if (waitingFor ne waitingForNone) {
+ if (waitingFor ne Reactor.waitingForNone) {
val savedWaitingFor = waitingFor
- waitingFor = waitingForNone
+ waitingFor = Reactor.waitingForNone
startSearch(msg, replyTo, savedWaitingFor)
} else {
sendBuffer.append(msg, replyTo)
@@ -90,49 +113,53 @@ trait Reactor extends OutputChannel[Any] {
todo()
}
- private[actors] def startSearch(msg: Any, replyTo: OutputChannel[Any], handler: PartialFunction[Any, Any]) =
- () => scheduler execute (makeReaction(() => {
- val startMbox = new MQueue("Start")
+ private[actors] def startSearch(msg: Msg, replyTo: OutputChannel[Any], handler: PartialFunction[Msg, Any]) =
+ () => scheduler execute makeReaction(() => {
+ val startMbox = new MQueue[Msg]("Start")
synchronized { startMbox.append(msg, replyTo) }
searchMailbox(startMbox, handler, true)
- }))
+ })
- private[actors] def makeReaction(fun: () => Unit): Runnable =
- new ReactorTask(this, fun)
+ private[actors] final def makeReaction(fun: () => Unit): Runnable =
+ makeReaction(fun, null, null)
- private[actors] def resumeReceiver(item: (Any, OutputChannel[Any]), handler: PartialFunction[Any, Any], onSameThread: Boolean) {
+ /* This method is supposed to be overridden. */
+ private[actors] def makeReaction(fun: () => Unit, handler: PartialFunction[Msg, Any], msg: Msg): Runnable =
+ new ReactorTask(this, fun, handler, msg)
+
+ private[actors] def resumeReceiver(item: (Msg, OutputChannel[Any]), handler: PartialFunction[Msg, Any], onSameThread: Boolean) {
if (onSameThread)
- handler(item._1)
- else {
+ makeReaction(null, handler, item._1).run()
+ else
scheduleActor(handler, item._1)
- /* Here, we throw a SuspendActorException to avoid
- terminating this actor when the current ReactorTask
- is finished.
- The SuspendActorException skips the termination code
- in ReactorTask.
- */
- throw Actor.suspendException
- }
+ /* Here, we throw a SuspendActorControl to avoid
+ terminating this actor when the current ReactorTask
+ is finished.
+
+ The SuspendActorControl skips the termination code
+ in ReactorTask.
+ */
+ throw Actor.suspendException
}
- def !(msg: Any) {
+ def !(msg: Msg) {
send(msg, null)
}
- def forward(msg: Any) {
+ def forward(msg: Msg) {
send(msg, null)
}
def receiver: Actor = this.asInstanceOf[Actor]
// guarded by this
- private[actors] def drainSendBuffer(mbox: MQueue) {
+ private[actors] def drainSendBuffer(mbox: MQueue[Msg]) {
sendBuffer.foreachDequeue(mbox)
}
- private[actors] def searchMailbox(startMbox: MQueue,
- handler: PartialFunction[Any, Any],
+ private[actors] def searchMailbox(startMbox: MQueue[Msg],
+ handler: PartialFunction[Msg, Any],
resumeOnSameThread: Boolean) {
var tmpMbox = startMbox
var done = false
@@ -144,16 +171,16 @@ trait Reactor extends OutputChannel[Any] {
synchronized {
// in mean time new stuff might have arrived
if (!sendBuffer.isEmpty) {
- tmpMbox = new MQueue("Temp")
+ tmpMbox = new MQueue[Msg]("Temp")
drainSendBuffer(tmpMbox)
// keep going
} else {
waitingFor = handler
- /* Here, we throw a SuspendActorException to avoid
+ /* Here, we throw a SuspendActorControl to avoid
terminating this actor when the current ReactorTask
is finished.
- The SuspendActorException skips the termination code
+ The SuspendActorControl skips the termination code
in ReactorTask.
*/
throw Actor.suspendException
@@ -166,10 +193,17 @@ trait Reactor extends OutputChannel[Any] {
}
}
- protected[actors] def react(f: PartialFunction[Any, Unit]): Nothing = {
- assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
+ /**
+ * Receives a message from this actor's mailbox.
+ * <p>
+ * This method never returns. Therefore, the rest of the computation
+ * has to be contained in the actions of the partial function.
+ *
+ * @param handler a partial function with message patterns and actions
+ */
+ protected[actors] def react(handler: PartialFunction[Msg, Unit]): Nothing = {
synchronized { drainSendBuffer(mailbox) }
- searchMailbox(mailbox, f, false)
+ searchMailbox(mailbox, handler, false)
throw Actor.suspendException
}
@@ -178,51 +212,71 @@ trait Reactor extends OutputChannel[Any] {
*
* assume handler != null
*
- * never throws SuspendActorException
+ * never throws SuspendActorControl
*/
- private[actors] def scheduleActor(handler: PartialFunction[Any, Any], msg: Any) = {
- val fun = () => handler(msg)
- val task = new ReactorTask(this, fun)
- scheduler executeFromActor task
+ private[actors] def scheduleActor(handler: PartialFunction[Msg, Any], msg: Msg) {
+ scheduler executeFromActor makeReaction(null, handler, msg)
}
- def start(): Reactor = {
- scheduler.newActor(this)
- val task = new ReactorTask(this, () => act())
- scheduler execute task
- this
+ def start(): Reactor[Msg] = synchronized {
+ if (_state == Actor.State.New) {
+ _state = Actor.State.Runnable
+ scheduler newActor this
+ scheduler execute makeReaction(() => act())
+ this
+ } else
+ this
+ }
+
+ /** Returns the execution state of this actor.
+ *
+ * @return the execution state
+ */
+ def getState: Actor.State.Value = synchronized {
+ if (waitingFor ne Reactor.waitingForNone)
+ Actor.State.Suspended
+ else
+ _state
+ }
+
+ implicit def mkBody[A](body: => A) = new Actor.Body[A] {
+ def andThen[B](other: => B): Unit = Reactor.this.seq(body, other)
}
/* This closure is used to implement control-flow operations
* built on top of `seq`. Note that the only invocation of
- * `kill` is supposed to be inside `Reaction.run`.
+ * `kill` is supposed to be inside `ReactorTask.run`.
*/
@volatile
private[actors] var kill: () => Unit =
() => { exit() }
private[actors] def seq[a, b](first: => a, next: => b): Unit = {
- val s = Actor.rawSelf(scheduler)
- val killNext = s.kill
- s.kill = () => {
- s.kill = killNext
+ val killNext = this.kill
+ this.kill = () => {
+ this.kill = killNext
// to avoid stack overflow:
// instead of directly executing `next`,
// schedule as continuation
- scheduleActor({ case _ => next }, 1)
+ scheduleActor({ case _ => next }, null)
throw Actor.suspendException
}
first
- throw new KillActorException
+ throw new KillActorControl
}
- protected[this] def exit(): Nothing = {
+ protected[actors] def exit(): Nothing = {
terminated()
throw Actor.suspendException
}
private[actors] def terminated() {
+ synchronized {
+ _state = Actor.State.Terminated
+ // reset waitingFor, otherwise getState returns Suspended
+ waitingFor = Reactor.waitingForNone
+ }
scheduler.terminated(this)
}
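
A minimal sketch of a user-defined Reactor with the new Msg type parameter; the Logger class and its messages are illustrative. Sends of the wrong message type are now rejected at compile time:

    import scala.actors.Reactor

    class Logger extends Reactor[String] {
      def act() {
        loop {
          react {
            case line => println("[log] " + line)
          }
        }
      }
    }

    val log = new Logger
    log.start()
    log ! "started"
    // log ! 42   // does not compile: the mailbox only accepts String
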
diff --git a/src/actors/scala/actors/ReplyableReactor.scala b/src/actors/scala/actors/ReactorCanReply.scala
index ecca50e26d..c53e3a78e1 100644
--- a/src/actors/scala/actors/ReplyableReactor.scala
+++ b/src/actors/scala/actors/ReactorCanReply.scala
@@ -11,13 +11,12 @@
package scala.actors
/**
- * The ReplyableReactor trait provides
- * message send operations that may result in a
- * response from the receiver.
+ * The ReactorCanReply trait provides message send operations that
+ * may result in a response from the receiver.
*
* @author Philipp Haller
*/
-private[actors] trait ReplyableReactor extends Replyable[Any, Any] {
+private[actors] trait ReactorCanReply extends CanReply[Any, Any] {
_: ReplyReactor =>
/**
@@ -67,27 +66,27 @@ private[actors] trait ReplyableReactor extends Replyable[Any, Any] {
* Sends <code>msg</code> to this actor and immediately
* returns a future representing the reply value.
* The reply is post-processed using the partial function
- * <code>f</code>. This also allows to recover a more
+ * <code>handler</code>. This also allows recovering a more
* precise type for the reply value.
*/
- override def !![A](msg: Any, f: PartialFunction[Any, A]): Future[A] = {
+ override def !![A](msg: Any, handler: PartialFunction[Any, A]): Future[A] = {
val myself = Actor.rawSelf(this.scheduler)
val ftch = new ReactChannel[A](myself)
val res = new scala.concurrent.SyncVar[A]
val out = new OutputChannel[Any] {
def !(msg: Any) = {
- val msg1 = f(msg)
+ val msg1 = handler(msg)
ftch ! msg1
res set msg1
}
def send(msg: Any, replyTo: OutputChannel[Any]) = {
- val msg1 = f(msg)
+ val msg1 = handler(msg)
ftch.send(msg1, replyTo)
res set msg1
}
def forward(msg: Any) = {
- val msg1 = f(msg)
+ val msg1 = handler(msg)
ftch forward msg1
res set msg1
}
diff --git a/src/actors/scala/actors/ReactorTask.scala b/src/actors/scala/actors/ReactorTask.scala
index 37aec0f8ec..ac809f04ff 100644
--- a/src/actors/scala/actors/ReactorTask.scala
+++ b/src/actors/scala/actors/ReactorTask.scala
@@ -8,58 +8,74 @@
// $Id$
-
package scala.actors
import java.lang.Runnable
import java.util.concurrent.Callable
+import scala.concurrent.forkjoin.RecursiveAction
+
/** <p>
* The class <code>ReactorTask</code>.
* </p>
*
* @author Philipp Haller
*/
-private[actors] class ReactorTask[T >: Null <: Reactor](var reactor: T, var fun: () => Any)
- extends Callable[Unit] with Runnable {
+private[actors] class ReactorTask[Msg >: Null](var reactor: Reactor[Msg],
+ var fun: () => Any,
+ var handler: PartialFunction[Msg, Any],
+ var msg: Msg)
+ extends RecursiveAction with Callable[Unit] with Runnable {
def run() {
- val saved = Actor.tl.get
- Actor.tl set reactor
try {
- beforeExecuting()
+ beginExecution()
try {
try {
- fun()
+ if (fun eq null)
+ handler(msg)
+ else
+ fun()
} catch {
case e: Exception if (reactor.exceptionHandler.isDefinedAt(e)) =>
reactor.exceptionHandler(e)
}
} catch {
- case _: KillActorException =>
+ case _: KillActorControl =>
}
reactor.kill()
}
catch {
- case _: SuspendActorException =>
+ case _: SuspendActorControl =>
// do nothing (continuation is already saved)
case e: Exception =>
- Debug.info(reactor+": caught "+e)
- Debug.doInfo { e.printStackTrace() }
+ // print message on default error stream
+ val msgException = "Uncaught exception in "+reactor+"\n"
+ val msgMessage = if (msg != null) "Message: "+msg+"\n" else ""
+ Console.err.print(msgException + msgMessage)
+ e.printStackTrace()
+
+ val uncaught = new UncaughtException(reactor, if (msg != null) Some(msg) else None, currentThread, e)
reactor.terminated()
- afterExecuting(e)
+ terminateExecution(uncaught)
} finally {
- Actor.tl set saved
+ suspendExecution()
this.reactor = null
this.fun = null
+ this.handler = null
+ this.msg = null
}
}
def call() = run()
- protected def beforeExecuting() {}
+ def compute() = run()
+
+ protected def beginExecution() {}
+
+ protected def suspendExecution() {}
- protected def afterExecuting(e: Exception) {}
+ protected def terminateExecution(e: Exception) {}
}
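
The run() method above consults reactor.exceptionHandler before treating an exception as uncaught. A sketch of overriding that handler in user code, assuming its signature is PartialFunction[Exception, Unit] as the calls in run() suggest; the actor and messages are illustrative:

    import scala.actors.Actor

    val resilient = new Actor {
      // Exceptions matched here are absorbed by the task's run() loop
      // instead of terminating the actor as uncaught.
      override def exceptionHandler: PartialFunction[Exception, Unit] = {
        case e: IllegalArgumentException => println("ignored: " + e.getMessage)
      }
      def act() {
        loop {
          react {
            case n: Int if n < 0 => throw new IllegalArgumentException("negative: " + n)
            case n: Int          => println("ok: " + n)
          }
        }
      }
    }
    resilient.start()
    resilient ! 1
    resilient ! -1
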
diff --git a/src/actors/scala/actors/ReplyReactor.scala b/src/actors/scala/actors/ReplyReactor.scala
index d5936ae662..26057ab9cb 100644
--- a/src/actors/scala/actors/ReplyReactor.scala
+++ b/src/actors/scala/actors/ReplyReactor.scala
@@ -10,6 +10,8 @@
package scala.actors
+import java.util.{Timer, TimerTask}
+
/** <p>
* The <code>ReplyReactor</code> trait extends the <code>Reactor</code>
* trait with methods to reply to the sender of a message.
@@ -19,17 +21,26 @@ package scala.actors
*
* @author Philipp Haller
*/
-trait ReplyReactor extends Reactor with ReplyableReactor {
+trait ReplyReactor extends Reactor[Any] with ReactorCanReply {
/* A list of the current senders. The head of the list is
* the sender of the message that was received last.
*/
@volatile
- private[actors] var senders: List[OutputChannel[Any]] =
- Nil
+ private[actors] var senders: List[OutputChannel[Any]] = List()
+
+ /* This option holds a TimerTask when the actor waits in a
+ * reactWithin. The TimerTask is cancelled when the actor
+ * resumes.
+ *
+ * guarded by this
+ */
+ private[actors] var onTimeout: Option[TimerTask] = None
- protected[actors] def sender: OutputChannel[Any] =
- senders.head
+ /**
+ * Returns the actor which sent the last received message.
+ */
+ protected[actors] def sender: OutputChannel[Any] = senders.head
/**
* Replies with <code>msg</code> to the sender.
@@ -53,17 +64,17 @@ trait ReplyReactor extends Reactor with ReplyableReactor {
}
private[actors] override def resumeReceiver(item: (Any, OutputChannel[Any]), handler: PartialFunction[Any, Any], onSameThread: Boolean) {
- senders = List(item._2)
- if (onSameThread)
- handler(item._1)
- else {
- scheduleActor(handler, item._1)
- // see Reactor.resumeReceiver
- throw Actor.suspendException
+ synchronized {
+ if (!onTimeout.isEmpty) {
+ onTimeout.get.cancel()
+ onTimeout = None
+ }
}
+ senders = List(item._2)
+ super.resumeReceiver(item, handler, onSameThread)
}
- private[actors] override def searchMailbox(startMbox: MQueue,
+ private[actors] override def searchMailbox(startMbox: MQueue[Any],
handler: PartialFunction[Any, Any],
resumeOnSameThread: Boolean) {
var tmpMbox = startMbox
@@ -79,7 +90,7 @@ trait ReplyReactor extends Reactor with ReplyableReactor {
synchronized {
// in mean time new stuff might have arrived
if (!sendBuffer.isEmpty) {
- tmpMbox = new MQueue("Temp")
+ tmpMbox = new MQueue[Any]("Temp")
drainSendBuffer(tmpMbox)
// keep going
} else {
@@ -95,4 +106,70 @@ trait ReplyReactor extends Reactor with ReplyableReactor {
}
}
+ private[actors] override def makeReaction(fun: () => Unit, handler: PartialFunction[Any, Any], msg: Any): Runnable =
+ new ReplyReactorTask(this, fun, handler, msg)
+
+ protected[actors] override def react(handler: PartialFunction[Any, Unit]): Nothing = {
+ assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
+ super.react(handler)
+ }
+
+ /**
+ * Receives a message from this actor's mailbox within a certain
+ * time span.
+ * <p>
+ * This method never returns. Therefore, the rest of the computation
+ * has to be contained in the actions of the partial function.
+ *
+ * @param msec the time span before timeout
+ * @param handler a partial function with message patterns and actions
+ */
+ protected[actors] def reactWithin(msec: Long)(handler: PartialFunction[Any, Unit]): Nothing = {
+ assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
+
+ synchronized { drainSendBuffer(mailbox) }
+
+ // first, remove spurious TIMEOUT message from mailbox if any
+ mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => m == TIMEOUT)
+
+ while (true) {
+ val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
+ senders = List(replyTo)
+ handler isDefinedAt m
+ })
+ if (null eq qel) {
+ synchronized {
+ // in the meantime new messages might have arrived
+ if (!sendBuffer.isEmpty) {
+ drainSendBuffer(mailbox)
+ // keep going
+ } else if (msec == 0L) {
+ // throws Actor.suspendException
+ resumeReceiver((TIMEOUT, this), handler, false)
+ } else {
+ waitingFor = handler
+ val thisActor = this
+ onTimeout = Some(new TimerTask {
+ def run() { thisActor.send(TIMEOUT, thisActor) }
+ })
+ Actor.timer.schedule(onTimeout.get, msec)
+ throw Actor.suspendException
+ }
+ }
+ } else
+ resumeReceiver((qel.msg, qel.session), handler, false)
+ }
+ throw Actor.suspendException
+ }
+
+ override def getState: Actor.State.Value = synchronized {
+ if (waitingFor ne Reactor.waitingForNone) {
+ if (onTimeout.isEmpty)
+ Actor.State.Suspended
+ else
+ Actor.State.TimedSuspended
+ } else
+ _state
+ }
+
}
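
A usage sketch of the reactWithin added above; TIMEOUT is delivered if no matching message arrives within the time span. The actor is illustrative:

    import scala.actors.TIMEOUT
    import scala.actors.Actor._

    val impatient = actor {
      reactWithin(1000) {
        case TIMEOUT => println("gave up after one second")
        case msg     => println("got " + msg + " in time")
      }
    }
    // impatient ! "hello"   // send within a second to take the non-timeout branch
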
diff --git a/src/actors/scala/actors/ReplyReactorTask.scala b/src/actors/scala/actors/ReplyReactorTask.scala
new file mode 100644
index 0000000000..59150276c0
--- /dev/null
+++ b/src/actors/scala/actors/ReplyReactorTask.scala
@@ -0,0 +1,36 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+// $Id$
+
+package scala.actors
+
+/** <p>
+ * The class <code>ReplyReactorTask</code>.
+ * </p>
+ *
+ * @author Philipp Haller
+ */
+private[actors] class ReplyReactorTask(reactor: ReplyReactor,
+ fun: () => Unit,
+ handler: PartialFunction[Any, Any],
+ msg: Any)
+ extends ReactorTask(reactor, fun, handler, msg) {
+
+ var saved: ReplyReactor = _
+
+ protected override def beginExecution() {
+ saved = Actor.tl.get
+ Actor.tl set reactor
+ }
+
+ protected override def suspendExecution() {
+ Actor.tl set saved
+ }
+
+}
diff --git a/src/actors/scala/actors/Scheduler.scala b/src/actors/scala/actors/Scheduler.scala
index d653271c1e..6792b65abe 100644
--- a/src/actors/scala/actors/Scheduler.scala
+++ b/src/actors/scala/actors/Scheduler.scala
@@ -25,10 +25,12 @@ object Scheduler extends DelegatingScheduler {
def makeNewScheduler: IScheduler = {
val sched = if (!ThreadPoolConfig.useForkJoin) {
+ // default is non-daemon
val s = new ResizableThreadPoolScheduler(false)
s.start()
s
} else {
+ // default is non-daemon, fair
val s = new ForkJoinScheduler
s.start()
s
diff --git a/src/actors/scala/actors/SchedulerAdapter.scala b/src/actors/scala/actors/SchedulerAdapter.scala
index f88d15bb38..698096c556 100644
--- a/src/actors/scala/actors/SchedulerAdapter.scala
+++ b/src/actors/scala/actors/SchedulerAdapter.scala
@@ -42,7 +42,7 @@ trait SchedulerAdapter extends IScheduler {
*
* @param a the actor to be registered
*/
- def newActor(a: Reactor) =
+ def newActor(a: TrackedReactor) =
Scheduler.newActor(a)
/** Unregisters an actor from this scheduler, because it
@@ -50,7 +50,7 @@ trait SchedulerAdapter extends IScheduler {
*
* @param a the actor to be unregistered
*/
- def terminated(a: Reactor) =
+ def terminated(a: TrackedReactor) =
Scheduler.terminated(a)
/** Registers a closure to be executed when the specified
@@ -59,7 +59,7 @@ trait SchedulerAdapter extends IScheduler {
* @param a the actor
* @param f the closure to be registered
*/
- def onTerminate(a: Reactor)(f: => Unit) =
+ def onTerminate(a: TrackedReactor)(f: => Unit) =
Scheduler.onTerminate(a)(f)
def managedBlock(blocker: scala.concurrent.ManagedBlocker) {
diff --git a/src/actors/scala/actors/UncaughtException.scala b/src/actors/scala/actors/UncaughtException.scala
new file mode 100644
index 0000000000..30043465a1
--- /dev/null
+++ b/src/actors/scala/actors/UncaughtException.scala
@@ -0,0 +1,31 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.actors
+
+/**
+ * The exit reason when an actor fails to catch an exception.
+ *
+ * @param actor the actor that threw the exception
+ * @param message the message the actor was processing, or None if no message (e.g. on initial startup)
+ * @param thread the thread on which the actor was running
+ * @param cause the uncaught exception
+ *
+ * @author Philipp Haller
+ * @author Erik Engbrecht
+ */
+class UncaughtException[Msg >: Null](val actor: Reactor[Msg],
+ val message: Option[Msg],
+ val thread: Thread,
+ cause: Exception)
+extends Exception(cause) {
+
+ override def toString() =
+ "UncaughtException("+actor+","+message+","+cause+")"
+
+}
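
A sketch of how an UncaughtException typically surfaces: as the reason of the Exit message delivered to a linked actor that traps exits. The actors and the failure are illustrative:

    import scala.actors.{Actor, Exit, UncaughtException}
    import scala.actors.Actor._

    val failing = actor {
      react { case _ => throw new RuntimeException("boom") }
    }

    val watcher = actor {
      self.trapExit = true
      self.link(failing)
      failing ! "go"
      react {
        case Exit(from, reason: UncaughtException[_]) =>
          println("linked actor failed with: " + reason.getCause)
      }
    }
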
diff --git a/src/actors/scala/actors/package.scala b/src/actors/scala/actors/package.scala
index 7075518931..98170b83dd 100644
--- a/src/actors/scala/actors/package.scala
+++ b/src/actors/scala/actors/package.scala
@@ -1,6 +1,10 @@
package scala
package object actors {
+
+ // type of Reactors tracked by termination detector
+ private[actors] type TrackedReactor = Reactor[A] forSome { type A >: Null }
+
@deprecated("use scala.actors.scheduler.ForkJoinScheduler instead")
type FJTaskScheduler2 = scala.actors.scheduler.ForkJoinScheduler
@@ -13,6 +17,16 @@ package object actors {
@deprecated("this class is going to be removed in a future release")
type WorkerThread = java.lang.Thread
+ @deprecated("use scala.actors.scheduler.SingleThreadedScheduler instead")
+ type SingleThreadedScheduler = scala.actors.scheduler.SingleThreadedScheduler
+
+ // This used to do a blind cast and throw a CCE after the package
+ // object was loaded. I have replaced with a variation that should work
+ // in whatever cases that was working but fail less exceptionally for
+ // those not intentionally using it.
@deprecated("this value is going to be removed in a future release")
- val ActorGC = scala.actors.Scheduler.impl.asInstanceOf[scala.actors.scheduler.ThreadPoolScheduler]
+ val ActorGC = scala.actors.Scheduler.impl match {
+ case x: scala.actors.scheduler.ActorGC => x
+ case _ => null
+ }
}
diff --git a/src/actors/scala/actors/remote/NetKernel.scala b/src/actors/scala/actors/remote/NetKernel.scala
index 9c2b18ada2..fc79fdec54 100644
--- a/src/actors/scala/actors/remote/NetKernel.scala
+++ b/src/actors/scala/actors/remote/NetKernel.scala
@@ -140,7 +140,7 @@ private[remote] class NetKernel(service: Service) {
def terminate() {
// tell all proxies to terminate
- proxies.valuesIterator foreach { p => p.send(Terminate, null) }
+ proxies.values foreach { _.send(Terminate, null) }
// tell service to terminate
service.terminate()
diff --git a/src/actors/scala/actors/ActorGC.scala b/src/actors/scala/actors/scheduler/ActorGC.scala
index af1b0ed82d..0fe94b09c9 100644
--- a/src/actors/scala/actors/ActorGC.scala
+++ b/src/actors/scala/actors/scheduler/ActorGC.scala
@@ -9,11 +9,11 @@
// $Id$
package scala.actors
+package scheduler
import java.lang.ref.{Reference, WeakReference, ReferenceQueue}
import scala.collection.mutable.HashSet
-import scala.actors.scheduler.TerminationMonitor
/**
* ActorGC keeps track of the number of live actors being managed by a
@@ -29,19 +29,19 @@ trait ActorGC extends TerminationMonitor {
self: IScheduler =>
/** Actors are added to refQ in newActor. */
- private val refQ = new ReferenceQueue[Reactor]
+ private val refQ = new ReferenceQueue[TrackedReactor]
/**
* This is a set of references to all the actors registered with
* this ActorGC. It is maintained so that the WeakReferences will not be GC'd
* before the actors to which they point.
*/
- private val refSet = new HashSet[Reference[t] forSome { type t <: Reactor }]
+ private val refSet = new HashSet[Reference[t] forSome { type t <: TrackedReactor }]
/** newActor is invoked whenever a new actor is started. */
- override def newActor(a: Reactor) = synchronized {
+ override def newActor(a: TrackedReactor) = synchronized {
// registers a reference to the actor with the ReferenceQueue
- val wr = new WeakReference[Reactor](a, refQ)
+ val wr = new WeakReference[TrackedReactor](a, refQ)
refSet += wr
activeActors += 1
}
@@ -71,20 +71,20 @@ trait ActorGC extends TerminationMonitor {
activeActors <= 0
}
- override def onTerminate(a: Reactor)(f: => Unit): Unit = synchronized {
+ override def onTerminate(a: TrackedReactor)(f: => Unit): Unit = synchronized {
terminationHandlers += (a -> (() => f))
}
- override def terminated(a: Reactor) = {
+ override def terminated(a: TrackedReactor) = {
super.terminated(a)
synchronized {
// find the weak reference that points to the terminated actor, if any
- refSet.find((ref: Reference[t] forSome { type t <: Reactor }) => ref.get() == a) match {
+ refSet.find((ref: Reference[t] forSome { type t <: TrackedReactor }) => ref.get() == a) match {
case Some(r) =>
// invoking clear will not cause r to be enqueued
r.clear()
- refSet -= r.asInstanceOf[Reference[t] forSome { type t <: Reactor }]
+ refSet -= r.asInstanceOf[Reference[t] forSome { type t <: TrackedReactor }]
case None =>
// do nothing
}
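
The tracking idiom above, weak references registered with a ReferenceQueue plus a strongly held set of the references themselves, sketched in isolation; the Tracker class is illustrative and not part of the library:

    import java.lang.ref.{Reference, WeakReference, ReferenceQueue}
    import scala.collection.mutable.HashSet

    class Tracker[T <: AnyRef] {
      private val refQ = new ReferenceQueue[T]
      // Hold the WeakReferences strongly so they are not collected
      // before the objects they point to.
      private val refSet = new HashSet[Reference[_ <: T]]

      def track(obj: T): Unit = synchronized {
        refSet += new WeakReference[T](obj, refQ)
      }

      // Count (and forget) objects collected since the last call.
      def drainCollected(): Int = synchronized {
        var n = 0
        var ref = refQ.poll()
        while (ref ne null) {
          refSet -= ref
          n += 1
          ref = refQ.poll()
        }
        n
      }
    }
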
diff --git a/src/actors/scala/actors/scheduler/DaemonScheduler.scala b/src/actors/scala/actors/scheduler/DaemonScheduler.scala
index 02f652db0b..257e847a6a 100644
--- a/src/actors/scala/actors/scheduler/DaemonScheduler.scala
+++ b/src/actors/scala/actors/scheduler/DaemonScheduler.scala
@@ -16,7 +16,7 @@ package scheduler
*/
object DaemonScheduler extends DelegatingScheduler {
- def makeNewScheduler(): IScheduler = {
+ protected def makeNewScheduler(): IScheduler = {
val sched = if (!ThreadPoolConfig.useForkJoin) {
val s = new ResizableThreadPoolScheduler(true)
s.start()
diff --git a/src/actors/scala/actors/scheduler/DefaultThreadPoolScheduler.scala b/src/actors/scala/actors/scheduler/DefaultThreadPoolScheduler.scala
deleted file mode 100644
index 4fed00ba26..0000000000
--- a/src/actors/scala/actors/scheduler/DefaultThreadPoolScheduler.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.actors.scheduler
-
-import java.util.concurrent.{ThreadPoolExecutor, TimeUnit, LinkedBlockingQueue,
- ThreadFactory}
-
-/**
- * The <code>DefaultThreadPoolScheduler</code> class uses a default
- * <code>ThreadPoolExecutor</code> for executing <code>Actor</code>s.
- *
- * It can be configured using the two JVM properties
- * <code>actors.corePoolSize</code> and
- * <code>actors.maxPoolSize</code> that control the initial and
- * maximum size of the thread pool, respectively.
- *
- * @author Philipp Haller
- */
-private[actors] class DefaultThreadPoolScheduler(daemon: Boolean) extends ThreadPoolScheduler(daemon) {
-
- executor = {
- val workQueue = new LinkedBlockingQueue[Runnable]
-
- val threadFactory = new ThreadFactory {
- def newThread(r: Runnable): Thread = {
- val t = new Thread(r)
- t setDaemon daemon
- t
- }
- }
-
- new ThreadPoolExecutor(ThreadPoolConfig.corePoolSize,
- ThreadPoolConfig.maxPoolSize,
- 60000L,
- TimeUnit.MILLISECONDS,
- workQueue,
- threadFactory,
- new ThreadPoolExecutor.CallerRunsPolicy)
- }
-
-}
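
The pool-size knobs documented by the deleted class still correspond to JVM properties read through ThreadPoolConfig, assuming the property names are unchanged by this refactoring; they must be in place before the first actor starts:

    // Either pass them on the command line:
    //   scala -Dactors.corePoolSize=8 -Dactors.maxPoolSize=64 MyApp
    // or set them programmatically before any actor is started:
    System.setProperty("actors.corePoolSize", "8")
    System.setProperty("actors.maxPoolSize", "64")
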
diff --git a/src/actors/scala/actors/scheduler/DelegatingScheduler.scala b/src/actors/scala/actors/scheduler/DelegatingScheduler.scala
index d1c99d7c13..193b1e3e2b 100644
--- a/src/actors/scala/actors/scheduler/DelegatingScheduler.scala
+++ b/src/actors/scala/actors/scheduler/DelegatingScheduler.scala
@@ -48,7 +48,7 @@ private[actors] trait DelegatingScheduler extends IScheduler {
}
}
- def newActor(actor: Reactor) = synchronized {
+ def newActor(actor: TrackedReactor) = synchronized {
val createNew = if (sched eq null)
true
else sched.synchronized {
@@ -65,9 +65,9 @@ private[actors] trait DelegatingScheduler extends IScheduler {
}
}
- def terminated(actor: Reactor) = impl.terminated(actor)
+ def terminated(actor: TrackedReactor) = impl.terminated(actor)
- def onTerminate(actor: Reactor)(f: => Unit) = impl.onTerminate(actor)(f)
+ def onTerminate(actor: TrackedReactor)(f: => Unit) = impl.onTerminate(actor)(f)
override def managedBlock(blocker: ManagedBlocker): Unit =
impl.managedBlock(blocker)
diff --git a/src/actors/scala/actors/scheduler/ExecutorScheduler.scala b/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
index bcd524f345..8c29af604e 100644
--- a/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
@@ -11,16 +11,57 @@
package scala.actors
package scheduler
-import java.util.concurrent.Callable
+import java.util.concurrent.{Callable, ExecutorService}
import scala.concurrent.ThreadPoolRunner
/**
+ * The <code>ExecutorScheduler</code> object is used to create
+ * <code>ExecutorScheduler</code> instances.
+ *
+ * @author Philipp Haller
+ */
+object ExecutorScheduler {
+
+ private def start(sched: ExecutorScheduler): ExecutorScheduler = {
+ sched.start()
+ sched
+ }
+
+ /** Creates an <code>ExecutorScheduler</code> using the provided
+ * <code>ExecutorService</code>.
+ *
+ * @param exec the executor to use
+ * @return the scheduler
+ */
+ def apply(exec: ExecutorService): ExecutorScheduler =
+ start(new ExecutorScheduler {
+ val executor: ExecutorService = exec
+ })
+
+ /** Creates an <code>ExecutorScheduler</code> using the provided
+ * <code>ExecutorService</code>.
+ *
+ * @param exec the executor to use
+ * @param term whether the scheduler should automatically terminate
+ * @return the scheduler
+ */
+ def apply(exec: ExecutorService, term: Boolean): ExecutorScheduler =
+ start(new ExecutorScheduler {
+ val executor: ExecutorService = exec
+ override val terminate = term
+ })
+
+}
+
+/**
* The <code>ExecutorScheduler</code> class uses an
* <code>ExecutorService</code> to execute <code>Actor</code>s.
*
* @author Philipp Haller
*/
-private[scheduler] trait ExecutorScheduler extends IScheduler with ThreadPoolRunner {
+trait ExecutorScheduler extends Thread
+ with IScheduler with TerminationService
+ with ThreadPoolRunner {
def execute(task: Runnable) {
super[ThreadPoolRunner].execute(task.asInstanceOf[Task[Unit]])
diff --git a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
index 54fab4fb11..ab4fca04ca 100644
--- a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
@@ -3,13 +3,15 @@ package scheduler
import java.util.{Collection, ArrayList}
import scala.concurrent.forkjoin._
+import scala.util.Random
/** The <code>ForkJoinScheduler</code> is backed by a lightweight
* fork-join task execution framework.
*
* @author Philipp Haller
*/
-class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean) extends Runnable with IScheduler with TerminationMonitor {
+class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean, fair: Boolean)
+ extends Runnable with IScheduler with TerminationMonitor {
private var pool = makeNewPool() // guarded by this
private var terminating = false // guarded by this
@@ -22,7 +24,7 @@ class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean
protected val CHECK_FREQ = 10
def this(d: Boolean) {
- this(ThreadPoolConfig.corePoolSize, ThreadPoolConfig.maxPoolSize, d)
+ this(ThreadPoolConfig.corePoolSize, ThreadPoolConfig.maxPoolSize, d, true)
}
def this() {
@@ -65,12 +67,12 @@ class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean
}
if (terminating)
- throw new QuitException
+ throw new QuitControl
if (allActorsTerminated) {
Debug.info(this+": all actors terminated")
terminating = true
- throw new QuitException
+ throw new QuitControl
}
if (!snapshoting) {
@@ -81,12 +83,12 @@ class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean
Debug.info(this+": drained "+num+" tasks")
drainedTasks = list
terminating = true
- throw new QuitException
+ throw new QuitControl
}
}
}
} catch {
- case _: QuitException =>
+ case _: QuitControl =>
Debug.info(this+": initiating shutdown...")
while (!pool.isQuiescent()) {
try {
@@ -106,11 +108,11 @@ class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean
}
override def executeFromActor(task: Runnable) {
- // TODO: only pass RecursiveAction (with Runnable), and cast to it
- val recAction = new RecursiveAction {
- def compute() = task.run()
- }
- recAction.fork()
+ // in fair mode: 2% chance of submitting to global task queue
+ if (fair && Random.nextInt(50) == 1)
+ pool.execute(task)
+ else
+ task.asInstanceOf[RecursiveAction].fork()
}
/** Submits a closure for execution.
diff --git a/src/actors/scala/actors/scheduler/QuitControl.scala b/src/actors/scala/actors/scheduler/QuitControl.scala
new file mode 100644
index 0000000000..b217094c1e
--- /dev/null
+++ b/src/actors/scala/actors/scheduler/QuitControl.scala
@@ -0,0 +1,19 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.actors.scheduler
+
+import scala.util.control.ControlThrowable
+
+/**
+ * The <code>QuitControl</code> class is used to manage control flow
+ * of certain schedulers.
+ *
+ * @author Philipp Haller
+ */
+private[scheduler] class QuitControl extends ControlThrowable
diff --git a/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala b/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
index ff74d0bc8b..3687138e0e 100644
--- a/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
@@ -112,7 +112,7 @@ class ResizableThreadPoolScheduler(protected val terminate: Boolean,
}
if (terminating)
- throw new QuitException
+ throw new QuitControl
if (!suspending) {
gc()
@@ -129,19 +129,19 @@ class ResizableThreadPoolScheduler(protected val terminate: Boolean,
Debug.info(this+": corePoolSize = "+coreSize+", maxPoolSize = "+maxSize)
terminating = true
- throw new QuitException
+ throw new QuitControl
}
}
} else {
drainedTasks = executor.shutdownNow()
Debug.info(this+": drained "+drainedTasks.size()+" tasks")
terminating = true
- throw new QuitException
+ throw new QuitControl
}
} // sync
}
} catch {
- case _: QuitException =>
+ case _: QuitControl =>
executor.shutdown()
// allow thread to exit
}
diff --git a/src/actors/scala/actors/scheduler/SchedulerService.scala b/src/actors/scala/actors/scheduler/SchedulerService.scala
deleted file mode 100644
index dca8bb5b9c..0000000000
--- a/src/actors/scala/actors/scheduler/SchedulerService.scala
+++ /dev/null
@@ -1,80 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.actors
-package scheduler
-
-import scala.util.control.ControlException
-import java.lang.{Runnable, Thread, InterruptedException}
-
-/**
- * The abstract <code>SchedulerService</code> class allows subclasses
- * to implement a custom <code>onShutdown</code> method, which is
- * invoked when the runtime system has detected that all actors have
- * been terminated.
- *
- * @version 0.9.18
- * @author Philipp Haller
- */
-abstract class SchedulerService(daemon: Boolean) extends Thread with IScheduler with ActorGC {
-
- setDaemon(daemon)
-
- def this() =
- this(false)
-
- private var terminating = false
-
- protected val CHECK_FREQ = 100
-
- def onShutdown(): Unit
-
- override def run() {
- try {
- while (true) {
- this.synchronized {
- try {
- wait(CHECK_FREQ)
- } catch {
- case _: InterruptedException =>
- }
- if (terminating)
- throw new QuitException
-
- gc()
-
- if (allActorsTerminated)
- throw new QuitException
- }
- }
- } catch {
- case _: QuitException =>
- Debug.info(this+": initiating shutdown...")
- // invoke shutdown hook
- onShutdown()
- // allow thread to exit
- }
- }
-
- /** Shuts down the scheduler.
- */
- def shutdown(): Unit = synchronized {
- terminating = true
- }
-}
-
-/**
- * The <code>QuitException</code> class is used to manage control flow
- * of certain schedulers and worker threads.
- *
- * @version 0.9.8
- * @author Philipp Haller
- */
-private[actors] class QuitException extends Throwable with ControlException
diff --git a/src/actors/scala/actors/scheduler/SimpleExecutorScheduler.scala b/src/actors/scala/actors/scheduler/SimpleExecutorScheduler.scala
deleted file mode 100644
index 6c8814e90c..0000000000
--- a/src/actors/scala/actors/scheduler/SimpleExecutorScheduler.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.actors
-package scheduler
-
-import java.util.concurrent.ExecutorService
-
-/**
- * The <code>SimpleExecutorScheduler</code> class uses an
- * <code>ExecutorService</code> to execute <code>Actor</code>s. It
- * does not start an additional thread.
- *
- * A <code>SimpleExecutorScheduler</code> attempts to shut down
- * the underlying <code>ExecutorService</code> only if
- * <code>terminate</code> is set to true.
- *
- * Otherwise, the <code>ExecutorService</code> must be shut down either
- * directly or by shutting down the
- * <code>SimpleExecutorScheduler</code> instance.
- *
- * @author Philipp Haller
- */
-class SimpleExecutorScheduler(protected var executor: ExecutorService,
- protected var terminate: Boolean)
- extends TerminationService(terminate) with ExecutorScheduler {
-
- /* This constructor (and the var above) is currently only used to work
- * around a bug in scaladoc, which cannot deal with early initializers
- * (to be used in subclasses such as DefaultExecutorScheduler) properly.
- */
- def this() {
- this(null, true)
- }
-
-}
diff --git a/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala b/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
index 9eca972e90..f91351d1f5 100644
--- a/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
+++ b/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
@@ -54,11 +54,11 @@ class SingleThreadedScheduler extends IScheduler {
isShutdown = true
}
- def newActor(actor: Reactor) {}
- def terminated(actor: Reactor) {}
+ def newActor(actor: TrackedReactor) {}
+ def terminated(actor: TrackedReactor) {}
// TODO: run termination handlers at end of shutdown.
- def onTerminate(actor: Reactor)(f: => Unit) {}
+ def onTerminate(actor: TrackedReactor)(f: => Unit) {}
def isActive =
!isShutdown
diff --git a/src/actors/scala/actors/scheduler/TerminationMonitor.scala b/src/actors/scala/actors/scheduler/TerminationMonitor.scala
index 5510ebb095..82897f7afd 100644
--- a/src/actors/scala/actors/scheduler/TerminationMonitor.scala
+++ b/src/actors/scala/actors/scheduler/TerminationMonitor.scala
@@ -13,14 +13,15 @@ package scheduler
import scala.collection.mutable.HashMap
-trait TerminationMonitor {
+private[scheduler] trait TerminationMonitor {
+ _: IScheduler =>
protected var activeActors = 0
- protected val terminationHandlers = new HashMap[Reactor, () => Unit]
+ protected val terminationHandlers = new HashMap[TrackedReactor, () => Unit]
private var started = false
/** newActor is invoked whenever a new actor is started. */
- def newActor(a: Reactor) = synchronized {
+ def newActor(a: TrackedReactor) = synchronized {
activeActors += 1
if (!started)
started = true
@@ -32,7 +33,7 @@ trait TerminationMonitor {
* @param a the actor
* @param f the closure to be registered
*/
- def onTerminate(a: Reactor)(f: => Unit): Unit = synchronized {
+ def onTerminate(a: TrackedReactor)(f: => Unit): Unit = synchronized {
terminationHandlers += (a -> (() => f))
}
@@ -40,7 +41,7 @@ trait TerminationMonitor {
*
* @param a the actor that has terminated
*/
- def terminated(a: Reactor) = {
+ def terminated(a: TrackedReactor) = {
// obtain termination handler (if any)
val todo = synchronized {
terminationHandlers.get(a) match {
diff --git a/src/actors/scala/actors/scheduler/TerminationService.scala b/src/actors/scala/actors/scheduler/TerminationService.scala
index b6f03f11e5..aa047eedfa 100644
--- a/src/actors/scala/actors/scheduler/TerminationService.scala
+++ b/src/actors/scala/actors/scheduler/TerminationService.scala
@@ -11,7 +11,7 @@
package scala.actors
package scheduler
-import java.lang.{Runnable, Thread, InterruptedException}
+import java.lang.{Thread, InterruptedException}
/**
* The <code>TerminationService</code> class starts a new thread
@@ -21,11 +21,16 @@ import java.lang.{Runnable, Thread, InterruptedException}
*
* @author Philipp Haller
*/
-abstract class TerminationService(terminate: Boolean)
- extends Thread with IScheduler with TerminationMonitor {
+private[scheduler] trait TerminationService extends TerminationMonitor {
+ _: Thread with IScheduler =>
private var terminating = false
+ /** Indicates whether the scheduler should terminate when all
+ * actors have terminated.
+ */
+ protected val terminate = true
+
protected val CHECK_FREQ = 50
def onShutdown(): Unit
@@ -39,15 +44,15 @@ abstract class TerminationService(terminate: Boolean)
} catch {
case _: InterruptedException =>
}
- if (terminating)
- throw new QuitException
- if (terminate && allActorsTerminated)
- throw new QuitException
+ if (terminating || (terminate && allActorsTerminated))
+ throw new QuitControl
+
+ gc()
}
}
} catch {
- case _: QuitException =>
+ case _: QuitControl =>
Debug.info(this+": initiating shutdown...")
// invoke shutdown hook
onShutdown()
@@ -60,4 +65,5 @@ abstract class TerminationService(terminate: Boolean)
def shutdown(): Unit = synchronized {
terminating = true
}
+
}
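The rewrite above turns TerminationService from an abstract Thread subclass into a trait whose self-type (`_: Thread with IScheduler =>`) lets it call Thread and scheduler members while only being mixable into classes that really are both. A small hypothetical illustration of that self-type pattern, with made-up Scheduler, Terminating, and PollingScheduler names:

    trait Scheduler { def shutdown(): Unit }

    trait Terminating { _: Thread with Scheduler =>
      @volatile protected var terminating = false

      /** Free to use Thread members (getName here) because the self-type
       *  guarantees the concrete mixer is a Thread. */
      def requestShutdown(): Unit = {
        terminating = true
        println(getName + ": shutdown requested")
      }
    }

    class PollingScheduler extends Thread with Scheduler with Terminating {
      def shutdown(): Unit = requestShutdown()
      override def run(): Unit =
        while (!terminating) Thread.sleep(50)
    }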
diff --git a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
index 93112ae80a..6efe9b007d 100644
--- a/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
+++ b/src/actors/scala/actors/scheduler/ThreadPoolConfig.scala
@@ -11,22 +11,19 @@
package scala.actors
package scheduler
+import util.Properties.{ javaVersion, javaVmVendor, isJavaAtLeast, propIsSetTo, propOrNone }
+
/**
* @author Erik Engbrecht
+ * @author Philipp Haller
*/
-object ThreadPoolConfig {
+private[actors] object ThreadPoolConfig {
private val rt = Runtime.getRuntime()
private val minNumThreads = 4
- private def getIntegerProp(propName: String): Option[Int] = {
- try {
- val prop = System.getProperty(propName)
- Some(Integer.parseInt(prop))
- } catch {
- case ace: java.security.AccessControlException => None
- case nfe: NumberFormatException => None
- }
- }
+ private def getIntegerProp(propName: String): Option[Int] =
+ try propOrNone(propName) map (_.toInt)
+ catch { case _: SecurityException | _: NumberFormatException => None }
val corePoolSize = getIntegerProp("actors.corePoolSize") match {
case Some(i) if i > 0 => i
@@ -37,30 +34,21 @@ object ThreadPoolConfig {
}
val maxPoolSize = {
- val preMaxSize = getIntegerProp("actors.maxPoolSize") match {
- case Some(i) => i
- case _ => 256
- }
+ val preMaxSize = getIntegerProp("actors.maxPoolSize") getOrElse 256
if (preMaxSize >= corePoolSize) preMaxSize else corePoolSize
}
private[actors] def useForkJoin: Boolean =
- try {
- val fjProp = System.getProperty("actors.enableForkJoin")
- if (fjProp != null)
- fjProp.equals("true")
- else {
- val javaVersion = System.getProperty("java.version")
- val jvmVendor = System.getProperty("java.vm.vendor")
+ try !propIsSetTo("actors.enableForkJoin", "false") &&
+ (propIsSetTo("actors.enableForkJoin", "true") || {
Debug.info(this+": java.version = "+javaVersion)
- Debug.info(this+": java.vm.vendor = "+jvmVendor)
- (javaVersion.indexOf("1.6") != -1 ||
- javaVersion.indexOf("1.7") != -1) &&
+ Debug.info(this+": java.vm.vendor = "+javaVmVendor)
+
// on IBM J9 1.6 do not use ForkJoinPool
- (jvmVendor.indexOf("Sun") != -1)
- }
- } catch {
- case se: SecurityException => false
+ // XXX this all needs to go into Properties.
+ isJavaAtLeast("1.6") && ((javaVmVendor contains "Sun") || (javaVmVendor contains "Apple"))
+ })
+ catch {
+ case _: SecurityException => false
}
-
}
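The ThreadPoolConfig cleanup folds property lookup, parse failures, and security restrictions into a single Option pipeline. A standalone sketch with plain System.getProperty instead of the Properties helpers; the fallback sizes below are illustrative, since the real corePoolSize default branch is elided from this hunk.

    object PoolSizeSketch {
      /** Parse an integer system property, treating missing, malformed, or
       *  security-restricted values as absent. */
      private def intProp(name: String): Option[Int] =
        try Option(System.getProperty(name)) map (_.trim.toInt)
        catch { case _: SecurityException | _: NumberFormatException => None }

      // illustrative fallback: scale with cores, never below four threads
      val corePoolSize: Int = intProp("actors.corePoolSize") match {
        case Some(i) if i > 0 => i
        case _                => (Runtime.getRuntime.availableProcessors * 2) max 4
      }

      val maxPoolSize: Int =
        (intProp("actors.maxPoolSize") getOrElse 256) max corePoolSize
    }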
diff --git a/src/actors/scala/actors/scheduler/ThreadPoolScheduler.scala b/src/actors/scala/actors/scheduler/ThreadPoolScheduler.scala
deleted file mode 100644
index 2eb18645ff..0000000000
--- a/src/actors/scala/actors/scheduler/ThreadPoolScheduler.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.actors.scheduler
-
-import java.util.concurrent.ThreadPoolExecutor
-import scala.actors.Debug
-import scala.concurrent.ManagedBlocker
-
-/**
- * The <code>ThreadPoolScheduler</code> class uses a
- * <code>ThreadPoolExecutor</code> to execute <code>Actor</code>s.
- *
- * A <code>ThreadPoolScheduler</code> attempts to shut down
- * the underlying <code>ThreadPoolExecutor</code> only if
- * <code>terminate</code> is set to true.
- *
- * Otherwise, the <code>ThreadPoolExecutor</code> must be shut down
- * either directly or by shutting down the
- * <code>ThreadPoolScheduler</code> instance.
- *
- * @author Philipp Haller
- */
-class ThreadPoolScheduler(protected var executor: ThreadPoolExecutor,
- protected val terminate: Boolean,
- protected val daemon: Boolean)
- extends Thread with ExecutorScheduler with TerminationMonitor {
-
- setDaemon(daemon)
-
- private var terminating = false // guarded by this
- protected val CHECK_FREQ = 10
-
- /* This constructor (and the var above) is currently only used to work
- * around a bug in scaladoc, which cannot deal with early initializers
- * (to be used in subclasses such as DefaultThreadPoolScheduler)
- * properly.
- */
- def this(d: Boolean) {
- this(null, true, d)
- }
-
- override def run() {
- try {
- while (true) {
- this.synchronized {
- try {
- wait(CHECK_FREQ)
- } catch {
- case _: InterruptedException =>
- }
-
- if (terminating || (terminate && allActorsTerminated))
- throw new QuitException
-
- gc()
- }
- }
- } catch {
- case _: QuitException =>
- Debug.info(this+": initiating shutdown...")
- // invoke shutdown hook
- onShutdown()
- // allow thread to exit
- }
- }
-
- /** Shuts down the scheduler.
- */
- def shutdown(): Unit = synchronized {
- terminating = true
- }
-
-}
diff --git a/src/attic/README b/src/attic/README
new file mode 100644
index 0000000000..9fb600ae57
--- /dev/null
+++ b/src/attic/README
@@ -0,0 +1,2 @@
+This is a holding area for source files which aren't used in
+trunk anymore but which we're keeping available for a time.
\ No newline at end of file

diff --git a/src/compiler/scala/tools/nsc/models/Models.scala b/src/attic/scala/tools/nsc/models/Models.scala
index 2c44c290ae..438a9d3f2c 100644
--- a/src/compiler/scala/tools/nsc/models/Models.scala
+++ b/src/attic/scala/tools/nsc/models/Models.scala
@@ -8,7 +8,6 @@ package scala.tools.nsc
package models
import scala.tools.nsc.Global
-import scala.tools.nsc.util.{Position,NoPosition}
/** This abstract class ...
*
@@ -263,7 +262,6 @@ abstract class Models {
}
abstract class ValOrDefMod(parent0: Composite) extends MemberComposite(parent0) with HasClassObjects {
- def treey = tree.asInstanceOf[ValOrDefDef]
override def replacedBy(tree0: Tree): Boolean =
super.replacedBy(tree0) && tree0.isInstanceOf[ValOrDefDef]
@@ -297,7 +295,6 @@ abstract class Models {
abstract class ImplMod(parent0: Composite)
extends MemberComposite(parent0) with HasClassObjects {
- def treey = tree.asInstanceOf[ImplDef]
override def replacedBy(tree0: Tree): Boolean =
super.replacedBy(tree0) && tree0.isInstanceOf[ImplDef]
override def isMember(tree: Tree): Boolean = (super.isMember(tree) ||
@@ -362,7 +359,6 @@ abstract class Models {
super.replacedBy(tree0) && tree0.isInstanceOf[ModuleDef]
}
class TypeMod(parent0: Composite) extends MemberMod(parent0) {
- def treey = tree.asInstanceOf[TypeDef];
override def replacedBy(tree0 : Tree) : Boolean = (super.replacedBy(tree0) && tree0.isInstanceOf[TypeDef]);
}
def SourceMod(original: CompilationUnit) = new SourceMod(original)
diff --git a/src/compiler/scala/tools/nsc/models/SemanticTokens.scala b/src/attic/scala/tools/nsc/models/SemanticTokens.scala
index 2fae919614..4da23b358b 100644
--- a/src/compiler/scala/tools/nsc/models/SemanticTokens.scala
+++ b/src/attic/scala/tools/nsc/models/SemanticTokens.scala
@@ -14,8 +14,8 @@ import scala.collection.mutable.{HashMap, HashSet}
import scala.tools.nsc.Global
import scala.tools.nsc.symtab.{Flags, Names}
import scala.tools.nsc.symtab.Flags.DEFERRED
-import scala.tools.nsc.util.{BatchSourceFile, NoPosition, Position, SourceFile}
-import scala.util.NameTransformer
+import scala.tools.nsc.util.{BatchSourceFile, SourceFile}
+import scala.reflect.NameTransformer
class SemanticTokens(val compiler: Global) {
import compiler._
@@ -262,7 +262,6 @@ class SemanticTokens(val compiler: Global) {
build(arg.tpt);
}
}
- try {
//TPT=scala.Iterator[DocGenerator.this.compiler0.CompilationUnit] 260 class scala.tools.nsc.ast.Trees$TypeTree scala.Iterator[DocGenerator.this.compiler0.CompilationUnit] class scala.tools.nsc.symtab.Types$$anon$5
if ((tree.tpt eq null) || (tree.tpt.tpe eq null)) {
//Console.err.println("BAD: " + tree.tpt + " in " + tree);
@@ -270,11 +269,6 @@ class SemanticTokens(val compiler: Global) {
//Console.err.println("TPT=" + tree.tpt + " " + tree.tpt.pos + " " + tree.tpt.getClass() + " " + tree.tpt.tpe + " " + tree.tpt.tpe.getClass() + " " + tree.tpt.tpe.getClass().getSuperclass());
build(tree.tpt);
}
- } catch {
- case e: Error =>
- Console.err.println("VALDEF: " + tree + " " + tree.tpt + " " + tree.pos + " " + tree.tpt.pos);
- throw e;
- }
//Console.err.println("RHS: " + tree.rhs + " " + tree.rhs.getClass() + " " + tree.rhs.getClass().getSuperclass());
build(tree.rhs);
}
@@ -554,7 +548,7 @@ class SemanticTokens(val compiler: Global) {
Console.err.println("" + tree + "@" + tree.pos + " not in " +
unit.source.file.name + "[" + buf.length + "]");
Thread.dumpStack()
- throw new Error()
+ abort()
}
return 0
}
@@ -621,7 +615,7 @@ class SemanticTokens(val compiler: Global) {
Console.err.println("OFFSET=" + offset + " " + tok + " " + tok.length);
Console.err.println(" " + cursor.offset + " " + gap.length);
gap.length0 = offset - cursor.offset + tok.length
- //throw new Error();
+ //abort();
}
if (offset == cursor.offset) {
// replace or prepend
diff --git a/src/compiler/scala/tools/nsc/models/Signatures.scala b/src/attic/scala/tools/nsc/models/Signatures.scala
index fa93e3bfcb..2a94a1cfae 100644
--- a/src/compiler/scala/tools/nsc/models/Signatures.scala
+++ b/src/attic/scala/tools/nsc/models/Signatures.scala
@@ -11,7 +11,7 @@ import scala.collection.mutable.{HashMap, HashSet}
import scala.tools.nsc.{Global => Compiler}
import scala.tools.nsc.symtab.{Flags, Names}
import scala.tools.nsc.util.{ Position, SourceFile }
-import scala.util.NameTransformer
+import scala.reflect.NameTransformer
/** This class ...
*
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolWalker.scala b/src/attic/scala/tools/nsc/symtab/SymbolWalker.scala
index f2e16b537a..8c111875dd 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolWalker.scala
+++ b/src/attic/scala/tools/nsc/symtab/SymbolWalker.scala
@@ -3,7 +3,6 @@ package symtab
trait SymbolWalker {
val global : Global
- import scala.tools.nsc.util._
import global._
import scala.collection.mutable.LinkedHashSet
trait Visitor {
@@ -33,8 +32,6 @@ trait SymbolWalker {
def fs(l: List[Tree]) = l foreach f
def fss(l: List[List[Tree]]) = l foreach fs
- if (t.isInstanceOf[StubTree]) return
-
val sym = (t, t.tpe) match {
case (Super(_,_),SuperType(_,supertp)) if validSym(supertp) => supertp.typeSymbol
case _ if validSym(t) => t.symbol
diff --git a/src/build/maven/continuations-plugin-pom.xml b/src/build/maven/continuations-plugin-pom.xml
new file mode 100644
index 0000000000..0277b899ed
--- /dev/null
+++ b/src/build/maven/continuations-plugin-pom.xml
@@ -0,0 +1,51 @@
+<project
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang.plugins</groupId>
+ <artifactId>continuations</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2010</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD-like</name>
+ <url>http://www.scala-lang.org/downloads/license.html
+ </url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
+ <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ </scm>
+ <issueManagement>
+ <system>trac</system>
+ <url>http://lampsvn.epfl.ch/trac/scala
+ </url>
+ </issueManagement>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-compiler</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
+</project>
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index 4075c25ae0..b1d75f171d 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -45,6 +45,21 @@
<artifact:pom id="@{name}.pom" file="@{name}/@{name}-pom-fixed.xml" />
</sequential>
</macrodef>
+
+ <macrodef name="make-pom-plugin">
+ <attribute name="name" />
+ <attribute name="version" />
+ <sequential>
+ <copy file="plugins/@{name}/@{name}-plugin-pom.xml" tofile="plugins/@{name}/@{name}-pom-fixed.xml" overwrite="true">
+ <filterset>
+ <filter token="VERSION" value="@{version}" />
+ <filter token="RELEASE_REPOSITORY" value="${remote.release.repository}" />
+ <filter token="SNAPSHOT_REPOSITORY" value="${remote.snapshot.repository}" />
+ </filterset>
+ </copy>
+ <artifact:pom id="plugin-@{name}.pom" file="plugins/@{name}/@{name}-pom-fixed.xml" />
+ </sequential>
+ </macrodef>
<!-- Simply attaching documentation -->
<macrodef name="attach-doc">
<attribute name="name" />
@@ -72,6 +87,23 @@
</sequential>
</macrodef>
+ <!-- Deploy compiler plugins -->
+ <macrodef name="deploy-local-plugin">
+ <attribute name="name" />
+ <attribute name="version" />
+ <attribute name="repository" />
+ <element name="extra-attachments" optional="yes" />
+ <sequential>
+ <make-pom-plugin name="@{name}" version="@{version}" />
+ <artifact:install file="plugins/@{name}/@{name}.jar">
+ <artifact:pom refid="plugin-@{name}.pom" />
+ <artifact:localRepository path="@{repository}" id="${repository.credentials.id}" />
+ <extra-attachments />
+ </artifact:install>
+ </sequential>
+ </macrodef>
+
+
<!-- Deploy all artifacts locally -->
<macrodef name="deploy-local-all">
<attribute name="repository" />
@@ -87,6 +119,7 @@
<deploy-local name="scala-swing" version="@{version}" repository="@{repository}"/>
<deploy-local name="scalap" version="@{version}" repository="@{repository}"/>
<deploy-local name="scala-partest" version="@{version}" repository="@{repository}"/>
+ <deploy-local-plugin name="continuations" version="@{version}" repository="@{repository}"/>
<!-- scala swing api is included in main library api
<extra-attachments>
<artifact:attach type="jar" file="scala-swing/scala-swing-docs.jar" classifier="javadoc" />
@@ -116,6 +149,23 @@
</sequential>
</macrodef>
+
+ <!-- Deploy compiler plugins -->
+ <macrodef name="deploy-remote-plugin">
+ <attribute name="name" />
+ <attribute name="version" />
+ <attribute name="repository" />
+ <element name="extra-attachments" optional="yes" />
+ <sequential>
+ <make-pom-plugin name="@{name}" version="@{version}" />
+ <artifact:deploy file="plugins/@{name}/@{name}.jar" settingsFile="${settings.file}">
+ <artifact:pom refid="plugin-@{name}.pom" />
+ <artifact:remoteRepository url="@{repository}" id="${repository.credentials.id}" />
+ <extra-attachments />
+ </artifact:deploy>
+ </sequential>
+ </macrodef>
+
<!-- Deploy all artifacts locally -->
<macrodef name="deploy-remote-all">
<attribute name="repository" />
@@ -131,6 +181,7 @@
<deploy-remote name="scala-swing" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scalap" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scala-partest" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-plugin name="continuations" version="@{version}" repository="@{repository}"/>
<!-- scala swing api is included in main library api
<extra-attachments>
<artifact:attach type="jar" file="scala-swing/scala-swing-docs.jar" classifier="javadoc" />
diff --git a/src/build/pack.xml b/src/build/pack.xml
index 6239e61187..5c5c8fbd8d 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -194,7 +194,25 @@ MAIN DISTRIBUTION SBAZ
<mvn-copy-lib mvn.artifact.name="scalap"/>
</target>
- <target name="pack-maven.docs" depends="pack-maven.libs">
+ <target name="pack-maven.plugins" depends="pack-maven.start">
+ <macrodef name="mvn-copy-plugin">
+ <attribute name="mvn.artifact.name"/>
+ <sequential>
+ <mkdir dir="${dists.dir}/maven/${version.number}/plugins/@{mvn.artifact.name}"/>
+ <copy todir="${dists.dir}/maven/${version.number}/plugins/@{mvn.artifact.name}">
+ <fileset dir="${dist.dir}/plugins/">
+ <filename name="@{mvn.artifact.name}.jar"/>
+ </fileset>
+ <fileset dir="${src.dir}/build/maven/">
+ <filename name="@{mvn.artifact.name}-plugin-pom.xml"/>
+ </fileset>
+ </copy>
+ </sequential>
+ </macrodef>
+ <mvn-copy-plugin mvn.artifact.name="continuations"/>
+ </target>
+
+ <target name="pack-maven.docs" depends="pack-maven.libs, pack-maven.plugins">
<jar destfile="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"
basedir="${build-docs.dir}/library">
<include name="**/*"/>
diff --git a/src/compiler/scala/tools/ant/Pack200Task.scala b/src/compiler/scala/tools/ant/Pack200Task.scala
index d9aa8a1f5f..12a8706a1c 100644
--- a/src/compiler/scala/tools/ant/Pack200Task.scala
+++ b/src/compiler/scala/tools/ant/Pack200Task.scala
@@ -71,7 +71,7 @@ class Pack200Task extends MatchingTask {
* is used to remove empty packages and improve pack200 optimization.
* @param keep
* true to retain file ordering.
- * false to optomize directory structure (DEFAULT). */
+ * false to optimize directory structure (DEFAULT). */
def setKeepFileOrder(x: Boolean) { keepFileOrder = x }
/** If false, a single modification time is used for all contained files */
@@ -99,7 +99,7 @@ class Pack200Task extends MatchingTask {
\*============================================================================*/
/** Gets the list of individual JAR files for processing.
- * @returns The list of JAR files */
+ * @return The list of JAR files */
private def getFileList: List[File] = {
var files: List[File] = Nil
val fs = getImplicitFileSet
diff --git a/src/compiler/scala/tools/ant/ScalaBazaar.scala b/src/compiler/scala/tools/ant/ScalaBazaar.scala
index 30fd8947f5..f560883a2d 100644
--- a/src/compiler/scala/tools/ant/ScalaBazaar.scala
+++ b/src/compiler/scala/tools/ant/ScalaBazaar.scala
@@ -177,19 +177,19 @@ package scala.tools.ant {
\******************************************************************************/
/** Gets the value of the file attribute in a Scala-friendly form.
- * @returns The file as a file. */
+ * @return The file as a file. */
private def getName: String =
if (name.isEmpty) error("Name attribute must be defined first.")
else name.get
/** Gets the value of the file attribute in a Scala-friendly form.
- * @returns The file as a file. */
+ * @return The file as a file. */
private def getFile: File =
if (file.isEmpty) error("Member 'file' is empty.")
else getProject().resolveFile(file.get.toString())
/** Gets the value of the adfile attribute in a Scala-friendly form.
- * @returns The adfile as a file. */
+ * @return The adfile as a file. */
private def getAdfile: File =
if (adfile.isEmpty) error("Member 'adfile' is empty.")
else getProject().resolveFile(adfile.get.toString())
diff --git a/src/compiler/scala/tools/ant/ScalaTool.scala b/src/compiler/scala/tools/ant/ScalaTool.scala
index e7912d8b59..90f1fcaeda 100644
--- a/src/compiler/scala/tools/ant/ScalaTool.scala
+++ b/src/compiler/scala/tools/ant/ScalaTool.scala
@@ -72,7 +72,7 @@ class ScalaTool extends MatchingTask {
private var classpathPath: Path = emptyPath
/** Comma-separated Java system properties to pass to the JRE. Properties
- * are formated as name=value. Properties scala.home, scala.tool.name and
+ * are formatted as name=value. Properties scala.home, scala.tool.name and
* scala.tool.version are always set. */
private var properties: List[(String, String)] = Nil
@@ -166,12 +166,12 @@ class ScalaTool extends MatchingTask {
\*============================================================================*/
/** Gets the value of the classpath attribute in a Scala-friendly form.
- * @returns The class path as a list of files. */
+ * @return The class path as a list of files. */
private def getUnixclasspath: String =
transposeVariableMarkup(classpath.mkString("", ":", "").replace('\\', '/'), "${", "}")
/** Gets the value of the classpath attribute in a Scala-friendly form.
- * @returns The class path as a list of files. */
+ * @return The class path as a list of files. */
private def getWinclasspath: String =
transposeVariableMarkup(classpath.mkString("", ";", "").replace('/', '\\'), "%", "%")
diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala
index 8bd5444ee3..4ecfc3dc85 100644
--- a/src/compiler/scala/tools/ant/Scalac.scala
+++ b/src/compiler/scala/tools/ant/Scalac.scala
@@ -69,7 +69,7 @@ import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
*
* @author Gilles Dubochet, Stephane Micheloud
*/
-class Scalac extends MatchingTask {
+class Scalac extends MatchingTask with ScalacShared {
/** The unique Ant file utilities instance to use in this task. */
private val fileUtils = FileUtils.getFileUtils()
@@ -563,7 +563,7 @@ class Scalac extends MatchingTask {
log("Scalac params = '" + addParams + "'", Project.MSG_DEBUG)
// let CompilerCommand processes all params
- val command = new CompilerCommand(settings.splitParams(addParams), settings, error, false)
+ val command = new CompilerCommand(settings.splitParams(addParams), settings)
// resolve dependenciesFile path from project's basedir, so <ant antfile ...> call from other project works.
// the dependenciesFile may be relative path to basedir or absolute path, in either case, the following code
@@ -606,7 +606,7 @@ class Scalac extends MatchingTask {
}
java setClasspath scalacPath
- java setClassname "scala.tools.nsc.Main"
+ java setClassname MainClass
// Write all settings to a temporary file
def writeSettings() : File = {
@@ -616,20 +616,16 @@ class Scalac extends MatchingTask {
val out = new PrintWriter(new BufferedWriter(new FileWriter(file)))
try {
- for (setting <- settings.allSettings ; arg <- setting.unparse)
+ for (setting <- settings.visibleSettings ; arg <- setting.unparse)
out println escapeArgument(arg)
for (file <- sourceFiles)
- out println file.getAbsolutePath
+ out println escapeArgument(file.getAbsolutePath)
}
finally out.close()
file
}
-
- java.createArg() setValue ("@" + writeSettings.getCanonicalPath)
- log(java.getCommandLine.getCommandline.mkString(" "), Project.MSG_VERBOSE)
-
- val res = java.executeJava()
+ val res = execWithArgFiles(java, List(writeSettings.getCanonicalPath))
if (failonerror && res != 0)
error("Compilation failed because of an internal compiler error;"+
" see the error output for details.")
diff --git a/src/compiler/scala/tools/ant/ScalacShared.scala b/src/compiler/scala/tools/ant/ScalacShared.scala
new file mode 100644
index 0000000000..356fb99e56
--- /dev/null
+++ b/src/compiler/scala/tools/ant/ScalacShared.scala
@@ -0,0 +1,25 @@
+/* __ *\
+** ________ ___ / / ___ Scala Ant Tasks **
+** / __/ __// _ | / / / _ | (c) 2005-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.tools.ant
+
+import org.apache.tools.ant.Project
+import org.apache.tools.ant.taskdefs.{ MatchingTask, Java }
+import scala.tools.nsc.io
+
+trait ScalacShared extends MatchingTask {
+ val MainClass = "scala.tools.nsc.Main"
+
+ def execWithArgFiles(java: Java, paths: List[String]) = {
+ paths foreach (p => java.createArg() setValue ("@"+ p))
+
+ val debugString = paths map (x => " (@ = '%s')".format(io.File(x).slurp())) mkString ""
+ log(java.getCommandLine.getCommandline.mkString("", " ", debugString), Project.MSG_VERBOSE)
+ java.executeJava()
+ }
+}
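ScalacShared centralizes the `@argfile` trick: instead of passing a long argument list on the command line, the task writes the arguments to a file and hands the tool a single `@path` token. A hedged sketch of the same idea with plain java.io (scala.tools.nsc.io is compiler-internal); the file prefix and encoding here are arbitrary choices.

    import java.io.{ File, PrintWriter }

    object ArgFileSketch {
      /** Write the arguments to a temporary file and return the "@path" token
       *  that scalac-style tools expand into the file's contents. */
      def argFileToken(args: Seq[String]): String = {
        val tmp = File.createTempFile("scalacfork", ".args")
        tmp.deleteOnExit()
        val out = new PrintWriter(tmp, "UTF-8")
        try out.println(args mkString " ")
        finally out.close()
        "@" + tmp.getAbsolutePath
      }

      // e.g. argFileToken(Seq("-d", "classes", "Foo.scala")) yields "@/tmp/scalacfork....args"
    }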
diff --git a/src/compiler/scala/tools/ant/Scaladoc.scala b/src/compiler/scala/tools/ant/Scaladoc.scala
index 78093ae95a..03dc99be68 100644
--- a/src/compiler/scala/tools/ant/Scaladoc.scala
+++ b/src/compiler/scala/tools/ant/Scaladoc.scala
@@ -107,6 +107,9 @@ class Scaladoc extends MatchingTask {
/** The document version, to be added to the title. */
private var docversion: Option[String] = None
+ /** Instruct the compiler to generate links to sources */
+ private var docsourceurl: Option[String] = None
+
/** Instruct the compiler to use additional parameters */
private var addParams: String = ""
@@ -264,6 +267,22 @@ class Scaladoc extends MatchingTask {
encoding = Some(input)
}
+ /** Sets the <code>docversion</code> attribute.
+ *
+ * @param input The value of <code>docversion</code>.
+ */
+ def setDocversion(input: String) {
+ docversion = Some(input)
+ }
+
+ /** Sets the <code>docsourceurl</code> attribute.
+ *
+ * @param input The value of <code>docsourceurl</code>.
+ */
+ def setDocsourceurl(input: String) {
+ docsourceurl = Some(input)
+ }
+
/** Sets the <code>doctitle</code> attribute.
*
* @param input The value of <code>doctitle</code>.
@@ -497,21 +516,12 @@ class Scaladoc extends MatchingTask {
if (!encoding.isEmpty) docSettings.encoding.value = encoding.get
if (!doctitle.isEmpty) docSettings.doctitle.value = decodeEscapes(doctitle.get)
if (!docversion.isEmpty) docSettings.docversion.value = decodeEscapes(docversion.get)
+ if (!docsourceurl.isEmpty) docSettings.docsourceurl.value = decodeEscapes(docsourceurl.get)
docSettings.deprecation.value = deprecation
docSettings.unchecked.value = unchecked
log("Scaladoc params = '" + addParams + "'", Project.MSG_DEBUG)
- var args = docSettings.splitParams(addParams)
-
- while (!args.isEmpty) {
- if (args.head startsWith "-") {
- val args0 = args
- args = docSettings.parseParams(args)
- if (args0 eq args) error("Parameter '" + args.head + "' is not recognised by Scaladoc.")
- }
- else if (args.head == "") args = args.tail
- else error("Parameter '" + args.head + "' does not start with '-'.")
- }
+ docSettings processArgumentString addParams
Pair(docSettings, sourceFiles)
}
diff --git a/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala b/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala
index 543ea10cc7..2263196af4 100644
--- a/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala
+++ b/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala
@@ -32,7 +32,7 @@ class ForeignCompiler {
private lazy val nsc: Global = {
try {
- val command = new CompilerCommand(args.toList, settings, error, false)
+ val command = new CompilerCommand(args.toList, settings)
new Global(command.settings, reporter)
}
catch {
@@ -42,7 +42,7 @@ class ForeignCompiler {
}
def compile(files: Array[File]): Int = {
- val command = new CompilerCommand(files.toList.map(_.toString), settings, error, true)
+ val command = new CompilerCommand(files.toList map (_.toString), settings)
(new nsc.Run) compile command.files
reporter.ERROR.count << 16 | reporter.WARNING.count
}
diff --git a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
index 77cc11fe6b..353499ddb3 100644
--- a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
+++ b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
@@ -8,15 +8,21 @@
// $Id$
-package scala.tools.ant.sabbus
+package scala.tools.ant
+package sabbus
import java.io.File
import java.io.FileWriter
import org.apache.tools.ant.Project
-import org.apache.tools.ant.taskdefs.{MatchingTask, Java}
-import org.apache.tools.ant.util.{GlobPatternMapper, SourceFileScanner}
+import org.apache.tools.ant.taskdefs.{ MatchingTask, Java }
+import org.apache.tools.ant.util.{ GlobPatternMapper, SourceFileScanner }
+import scala.tools.nsc.io
+import scala.tools.nsc.util.ScalaClassLoader
+
+class ScalacFork extends MatchingTask with ScalacShared with TaskArgs {
+ private def originOfThis: String =
+ ScalaClassLoader.originOfClass(classOf[ScalacFork]) map (_.toString) getOrElse "<unknown>"
-class ScalacFork extends MatchingTask with TaskArgs {
def setSrcdir(input: File) {
sourceDir = Some(input)
}
@@ -43,67 +49,73 @@ class ScalacFork extends MatchingTask with TaskArgs {
private var jvmArgs: Option[String] = None
private var argfile: Option[File] = None
+ private def createMapper() = {
+ val mapper = new GlobPatternMapper()
+ val extension = if (isMSIL) "*.msil" else "*.class"
+ mapper setTo extension
+ mapper setFrom "*.scala"
+
+ mapper
+ }
+
override def execute() {
- if (compilerPath.isEmpty) error("Mandatory attribute 'compilerpath' is not set.")
- if (sourceDir.isEmpty) error("Mandatory attribute 'srcdir' is not set.")
- if (destinationDir.isEmpty) error("Mandatory attribute 'destdir' is not set.")
+ def plural(x: Int) = if (x > 1) "s" else ""
+
+ log("Executing ant task scalacfork, origin: %s".format(originOfThis), Project.MSG_VERBOSE)
+
+ val compilerPath = this.compilerPath getOrElse error("Mandatory attribute 'compilerpath' is not set.")
+ val sourceDir = this.sourceDir getOrElse error("Mandatory attribute 'srcdir' is not set.")
+ val destinationDir = this.destinationDir getOrElse error("Mandatory attribute 'destdir' is not set.")
val settings = new Settings
- settings.d = destinationDir.get
- if (!compTarget.isEmpty) settings.target = compTarget.get
- if (!compilationPath.isEmpty) settings.classpath = compilationPath.get
- if (!sourcePath.isEmpty) settings.sourcepath = sourcePath.get
- if (compTarget.isDefined && compTarget.get == "msil") settings.sourcedir = sourceDir.get
- if (!params.isEmpty) settings.more = params.get
+ settings.d = destinationDir
- // not yet used: compilerPath, sourcedir (used in mapper), failonerror, timeout
+ compTarget foreach (settings.target = _)
+ compilationPath foreach (settings.classpath = _)
+ sourcePath foreach (settings.sourcepath = _)
+ params foreach (settings.more = _)
+
+ if (isMSIL)
+ settings.sourcedir = sourceDir
+
+ val mapper = createMapper()
- val mapper = new GlobPatternMapper()
- if (compTarget.isDefined && compTarget.get == "msil")
- mapper.setTo("*.msil")
- else
- mapper.setTo("*.class")
- mapper.setFrom("*.scala")
val includedFiles: Array[File] =
new SourceFileScanner(this).restrict(
- getDirectoryScanner(sourceDir.get).getIncludedFiles,
- sourceDir.get,
- destinationDir.get,
+ getDirectoryScanner(sourceDir).getIncludedFiles,
+ sourceDir,
+ destinationDir,
mapper
- ) map (new File(sourceDir.get, _))
- if (includedFiles.size > 0 || argfile.isDefined) {
- if (includedFiles.size > 0)
- log("Compiling "+ includedFiles.size +" file"+
- (if (includedFiles.size > 1) "s" else "") +" to "+ destinationDir.get)
- if (argfile.isDefined)
- log("Using argument file: @"+ argfile.get)
-
- val java = new Java(this) // set this as owner
- java.setFork(true)
- // using 'setLine' creates multiple arguments out of a space-separated string
- if (!jvmArgs.isEmpty) java.createJvmarg().setLine(jvmArgs.get)
- java.setClasspath(compilerPath.get)
- java.setClassname("scala.tools.nsc.Main")
- if (!timeout.isEmpty) java.setTimeout(timeout.get)
-
- //dump the arguments to a file and do "java @file"
- val tempArgFile = File.createTempFile("scalacfork","")
- val outf = new FileWriter(tempArgFile)
- for (arg <- settings.toArgs)
- { outf.write(arg) ; outf.write(" ") }
- for (file <- includedFiles)
- { outf.write(file.getPath) ; outf.write(" ") }
- outf.close
-
- java.createArg().setValue("@"+ tempArgFile.getAbsolutePath)
- if (argfile.isDefined)
- java.createArg().setValue("@"+ argfile.get)
-
- log(java.getCommandLine.getCommandline.mkString("", " ", ""), Project.MSG_VERBOSE)
- val res = java.executeJava()
- if (failOnError && res != 0)
- error("Compilation failed because of an internal compiler error;"+
- " see the error output for details.")
- }
+ ) map (x => new File(sourceDir, x))
+
+ /** Nothing to do. */
+ if (includedFiles.isEmpty && argfile.isEmpty)
+ return
+
+ if (includedFiles.nonEmpty)
+ log("Compiling %d file%s to %s".format(includedFiles.size, plural(includedFiles.size), destinationDir))
+
+ argfile foreach (x => log("Using argfile file: @" + x))
+
+ val java = new Java(this) // set this as owner
+ java setFork true
+ // using 'setLine' creates multiple arguments out of a space-separated string
+ jvmArgs foreach (java.createJvmarg() setLine _)
+ timeout foreach (java setTimeout _)
+
+ java setClasspath compilerPath
+ java setClassname MainClass
+
+ // dump the arguments to a file and do "java @file"
+ val tempArgFile = io.File.makeTemp("scalacfork")
+ val tokens = settings.toArgs ++ (includedFiles map (_.getPath))
+ tempArgFile writeAll (tokens mkString " ")
+
+ val paths = List(Some(tempArgFile.toAbsolute.path), argfile).flatten map (_.toString)
+ val res = execWithArgFiles(java, paths)
+
+ if (failOnError && res != 0)
+ error("Compilation failed because of an internal compiler error;"+
+ " see the error output for details.")
}
}
diff --git a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
index 72f091cecc..20a6791648 100644
--- a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
+++ b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
@@ -14,22 +14,10 @@ import java.io.File
import org.apache.tools.ant.Task
import org.apache.tools.ant.types.{Path, Reference}
-trait TaskArgs { this: Task =>
+trait CompilationPathProperty {
+ this: Task =>
- def setId(input: String) {
- id = Some(input)
- }
-
- def setParams(input: String) {
- params = params match {
- case None => Some(input)
- case Some(ps) => Some(ps + " " + input)
- }
- }
-
- def setTarget(input: String) {
- compTarget = Some(input)
- }
+ protected var compilationPath: Option[Path] = None
def setCompilationPath(input: Path) {
if (compilationPath.isEmpty) compilationPath = Some(input)
@@ -44,6 +32,25 @@ trait TaskArgs { this: Task =>
def setCompilationPathRef(input: Reference) {
createCompilationPath.setRefid(input)
}
+}
+
+trait TaskArgs extends CompilationPathProperty {
+ this: Task =>
+
+ def setId(input: String) {
+ id = Some(input)
+ }
+
+ def setParams(input: String) {
+ params = params match {
+ case None => Some(input)
+ case Some(ps) => Some(ps + " " + input)
+ }
+ }
+
+ def setTarget(input: String) {
+ compTarget = Some(input)
+ }
def setSrcPath(input: Path) {
if (sourcePath.isEmpty) sourcePath = Some(input)
@@ -80,8 +87,9 @@ trait TaskArgs { this: Task =>
protected var id: Option[String] = None
protected var params: Option[String] = None
protected var compTarget: Option[String] = None
- protected var compilationPath: Option[Path] = None
protected var sourcePath: Option[Path] = None
protected var compilerPath: Option[Path] = None
protected var destinationDir: Option[File] = None
+
+ def isMSIL = compTarget exists (_ == "msil")
}
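Both this TaskArgs refactor and the ScalacFork one above trade `if (!opt.isEmpty) f(opt.get)` for `opt foreach f`, and express the msil check as `compTarget exists (_ == "msil")`. A tiny self-contained illustration of those Option idioms; Conf and its fields are made up for the example.

    object OptionIdiomSketch {
      final case class Conf(var target: String = "jvm", var more: String = "")

      /** Assign a field only when a value is present, replacing isEmpty/get pairs. */
      def configure(conf: Conf, compTarget: Option[String], params: Option[String]): Unit = {
        compTarget foreach (t => conf.target = t)
        params foreach (p => conf.more = p)
      }

      /** True only when a target is present and equals "msil". */
      def isMSIL(compTarget: Option[String]): Boolean =
        compTarget exists (_ == "msil")
    }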
diff --git a/src/compiler/scala/tools/ant/sabbus/Use.scala b/src/compiler/scala/tools/ant/sabbus/Use.scala
index 792efc3af8..fa17c48b63 100644
--- a/src/compiler/scala/tools/ant/sabbus/Use.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Use.scala
@@ -61,7 +61,7 @@ class Use extends MatchingTask {
if (errors > 0)
error("Compilation failed with " + errors + " error" + (if (errors > 1) "s" else "") + ".")
else if (warnings > 0)
- log("Compilation suceeded with " + warnings + " warning" + (if (warnings > 1) "s" else "") + ".")
+ log("Compilation succeeded with " + warnings + " warning" + (if (warnings > 1) "s" else "") + ".")
}
catch {
case CompilationFailure(msg, ex) =>
diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
index ac50e3de54..b87463f0b9 100644
--- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
@@ -1,5 +1,5 @@
#!/bin/sh
-
+#
##############################################################################
# Copyright 2002-2010, LAMP/EPFL
#
@@ -63,6 +63,19 @@ fi
# Reminder: substitution ${JAVA_OPTS:=-Xmx256M -Xms16M} DO NOT work on Solaris
[ -n "$JAVA_OPTS" ] || JAVA_OPTS="@javaflags@"
+# break out -D options and add them to JAVA_OPTS as well so they reach the
+# underlying JVM in time to do some good.
+for i
+do
+ case "$i" in
+ -D*)
+ JAVA_OPTS="$JAVA_OPTS $i" ;;
+ *)
+ ;;
+ esac
+done
+
+
if [ -z "$JAVACMD" -a -n "$JAVA_HOME" -a -x "$JAVA_HOME/bin/java" ]; then
JAVACMD="$JAVA_HOME/bin/java"
fi
diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala
index d8c8caac05..b672a616c3 100644
--- a/src/compiler/scala/tools/nsc/CompileClient.scala
+++ b/src/compiler/scala/tools/nsc/CompileClient.scala
@@ -6,8 +6,11 @@
package scala.tools.nsc
-import java.io.{BufferedReader, File, InputStreamReader, PrintWriter}
+import java.io.{ BufferedReader, File, InputStreamReader, PrintWriter }
import Properties.fileEndings
+import scala.tools.util.PathResolver
+import io.Path
+import util.ClassPath
/** The client part of the fsc offline compiler. Instead of compiling
* things itself, it send requests to a CompileServer.
@@ -26,11 +29,7 @@ class StandardCompileClient {
/** Convert a sequence of filenames, separated by <code>File.pathSeparator</code>,
* into absolute filenames.
*/
- def absFileNames(paths: String) = {
- val sep = File.pathSeparator
- val pathsList = paths.split(sep).toList
- pathsList map absFileName mkString sep
- }
+ def absFileNames(paths: String) = ClassPath.map(paths, absFileName)
protected def normalize(args: Array[String]): (String, String) = {
var i = 0
@@ -40,7 +39,7 @@ class StandardCompileClient {
while (i < args.length) {
val arg = args(i)
if (fileEndings exists(arg endsWith _)) {
- args(i) = absFileName(arg)
+ args(i) = Path(arg).toAbsolute.path
} else if (arg startsWith "-J") {
//see http://java.sun.com/j2se/1.5.0/docs/tooldocs/solaris/javac.html#J
vmArgs append " "+arg.substring(2)
@@ -57,7 +56,7 @@ class StandardCompileClient {
if (i < args.length) {
arg match {
case "-classpath" | "-sourcepath" | "-bootclasspath" | "-extdirs" | "-d" =>
- args(i) = absFileNames(args(i))
+ args(i) = PathResolver.makeAbsolute(args(i))
i += 1
case "-server" =>
serverAdr = args(i)
@@ -76,36 +75,41 @@ class StandardCompileClient {
val (vmArgs, serverAdr) = normalize(args)
if (version) {
- Console.println(versionMsg)
+ Console println versionMsg
return 0
}
if (verbose) {
- Console.println("[Server arguments: " + args.mkString("", " ", "]"))
- Console.println("[VM arguments: " + vmArgs + "]")
+ Console println args.mkString("[Server arguments: ", " ", "]")
+ Console println "[VM arguments: %s]".format(vmArgs)
}
- val socket = if (serverAdr == "") compileSocket.getOrCreateSocket(vmArgs, !shutdown)
- else compileSocket.getSocket(serverAdr)
- var sawerror = false
- if (socket eq null) {
- if (shutdown) {
- Console.println("[No compilation server running.]")
- } else {
- Console.println("Compilation failed.")
- sawerror = true
- }
- } else {
- val out = new PrintWriter(socket.getOutputStream(), true)
- val in = new BufferedReader(new InputStreamReader(socket.getInputStream()))
- out.println(compileSocket.getPassword(socket.getPort()))
- out.println(args.mkString("", "\0", ""))
- var fromServer = in.readLine()
- while (fromServer ne null) {
- if (compileSocket.errorPattern.matcher(fromServer).matches)
- sawerror = true
- Console.println(fromServer)
- fromServer = in.readLine()
- }
- in.close ; out.close ; socket.close
+ val socket =
+ if (serverAdr == "") compileSocket.getOrCreateSocket(vmArgs, !shutdown)
+ else Some(compileSocket.getSocket(serverAdr))
+
+ val sawerror: Boolean = socket match {
+ case None =>
+ val msg = if (shutdown) "[No compilation server running.]" else "Compilation failed."
+ Console println msg
+ !shutdown
+
+ case Some(sock) =>
+ var wasError = false
+
+ sock.applyReaderAndWriter { (in, out) =>
+ out println compileSocket.getPassword(sock.getPort())
+ out println args.mkString("\0")
+ def loop: Unit = in.readLine() match {
+ case null => ()
+ case fromServer =>
+ if (compileSocket.errorPattern matcher fromServer matches)
+ wasError = true
+
+ Console println fromServer
+ loop
+ }
+ loop
+ }
+ wasError
}
if (sawerror) 1 else 0
}
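The client rewrite above replaces the mutable while/readLine loop with a local tail-recursive `loop` over the server's output. A runnable sketch of the same shape against a plain BufferedReader; the error predicate stands in for compileSocket.errorPattern.

    import java.io.{ BufferedReader, StringReader }
    import scala.annotation.tailrec

    object ReadLoopSketch {
      /** Echo every line and report whether any matched the error predicate. */
      def sawError(in: BufferedReader, isError: String => Boolean): Boolean = {
        @tailrec def loop(acc: Boolean): Boolean = in.readLine() match {
          case null => acc
          case line =>
            println(line)
            loop(acc || isError(line))
        }
        loop(false)
      }

      def main(args: Array[String]): Unit = {
        val in = new BufferedReader(new StringReader("ok\nerror: boom\nok"))
        println(sawError(in, _ startsWith "error"))   // lines, then: true
      }
    }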
diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala
index 67276b861f..c4f9b1d9f1 100644
--- a/src/compiler/scala/tools/nsc/CompileServer.scala
+++ b/src/compiler/scala/tools/nsc/CompileServer.scala
@@ -70,75 +70,78 @@ class StandardCompileServer extends SocketServer
(totalMemory - freeMemory).toDouble / maxMemory.toDouble > MaxCharge
}
- protected def newOfflineCompilerCommand(
- arguments: List[String],
- settings: Settings,
- error: String => Unit,
- interactive: Boolean
- ) = new OfflineCompilerCommand(arguments, settings, error, interactive)
+ protected def newOfflineCompilerCommand(arguments: List[String], settings: Settings) =
+ new OfflineCompilerCommand(arguments, settings)
def session() {
printMemoryStats()
val password = compileSocket getPassword port
val guessedPassword = in.readLine()
val input = in.readLine()
- if ((input ne null) && password == guessedPassword) {
- val args = input.split("\0",-1).toList
- if (args contains "-shutdown") {
- out.println("[Compile server exited]")
- shutDown = true
- return
- }
- if (args contains "-reset") {
- out.println("[Compile server was reset]")
- compiler = null
- return
- }
- def error(msg: String) {
- out.println(/*new Position*/ FakePos("fsc"),
- msg + "\n fsc -help gives more information")
- }
- val command = newOfflineCompilerCommand(args, new Settings(error), error, false)
- reporter = new ConsoleReporter(command.settings, in, out) {
- // disable prompts, so that compile server cannot block
- override def displayPrompt = ()
- }
+ if (input == null || password != guessedPassword)
+ return
- if (command.shouldStopWithInfo) {
- reporter.info(null,
- command.getInfoMessage(newGlobal(command.settings, reporter)), true)
- } else if (command.files.isEmpty)
- reporter.info(null, command.usageMsg, true)
- else {
- try {
- if ((compiler ne null) && settingsAreCompatible(command.settings, compiler.settings)) {
- compiler.settings = command.settings
- compiler.reporter = reporter
- } else {
- if (args contains "-verbose")
- out.println("[Starting new compile server instance]")
- compiler = newGlobal(command.settings, reporter)
- }
- val c = compiler
- val run = new c.Run()
- run compile command.files
- } catch {
- case ex @ FatalError(msg) =>
- if (command.settings.debug.value)
- ex.printStackTrace(out);
- reporter.error(null, "fatal error: " + msg)
- compiler = null
- case ex: Throwable =>
- ex.printStackTrace(out);
- reporter.error(null, "fatal error (server aborted): " + ex.getMessage())
- shutDown = true
- }
- reporter.printSummary()
- if (isMemoryFullEnough)
- compiler = null
+ val args = input.split("\0", -1).toList
+ if (args contains "-shutdown") {
+ out.println("[Compile server exited]")
+ shutDown = true
+ return
+ }
+ if (args contains "-reset") {
+ out.println("[Compile server was reset]")
+ compiler = null
+ return
+ }
+
+ def error(msg: String) {
+ out.println(FakePos("fsc"), msg + "\n fsc -help gives more information")
+ }
+
+ val command = newOfflineCompilerCommand(args, new Settings(error))
+
+ reporter = new ConsoleReporter(command.settings, in, out) {
+ // disable prompts, so that compile server cannot block
+ override def displayPrompt = ()
+ }
+
+ if (command.shouldStopWithInfo)
+ reporter.info(null, command.getInfoMessage(newGlobal(command.settings, reporter)), true)
+ else if (command.files.isEmpty)
+ reporter.info(null, command.usageMsg, true)
+ else {
+ try {
+ if (compiler != null && command.settings == compiler.settings) {
+ compiler.settings = command.settings
+ compiler.reporter = reporter
+ }
+ else {
+ if (args contains "-verbose") {
+ val reason = if (compiler == null) "compiler is null" else "settings not equal"
+ out.println("[Starting new compile server instance because %s]".format(reason))
}
+ compiler = newGlobal(command.settings, reporter)
+ }
+ val c = compiler
+ val run = new c.Run()
+ run compile command.files
+ }
+ catch {
+ case ex @ FatalError(msg) =>
+ if (command.settings.debug.value)
+ ex.printStackTrace(out);
+ reporter.error(null, "fatal error: " + msg)
+ compiler = null
+ case ex: Throwable =>
+ ex.printStackTrace(out);
+ reporter.error(null, "fatal error (server aborted): " + ex.getMessage())
+ shutDown = true
+ }
}
+
+ reporter.printSummary()
+ if (isMemoryFullEnough)
+ compiler = null
}
/** A directory holding redirected output */
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index 9e626ead39..d4697b69e6 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -12,10 +12,9 @@ import java.util.regex.Pattern
import java.net._
import java.security.SecureRandom
-import io.{ File, Path }
+import io.{ File, Path, Process, Socket }
import scala.util.control.Exception.catching
-
-// class CompileChannel { }
+import scala.tools.util.StringOps.splitWhere
/** This class manages sockets for the fsc offline compiler. */
class CompileSocket {
@@ -24,13 +23,12 @@ class CompileSocket {
/** The prefix of the port identification file, which is followed
* by the port number.
*/
- protected def dirName = "scalac-compile-server-port" //todo: lazy val
-
- protected def cmdName = Properties.cmdName //todo: lazy val
+ protected lazy val dirName = "scalac-compile-server-port"
+ protected lazy val cmdName = Properties.scalaCmd
/** The vm part of the command to start a new scala compile server */
protected val vmCommand = Properties.scalaHome match {
- case null => cmdName
+ case "" => cmdName
case dirname =>
val trial = File(dirname) / "bin" / cmdName
if (trial.canRead) trial.path
@@ -80,23 +78,16 @@ class CompileSocket {
/** The command which starts the compile server, given vm arguments.
*
* @param vmArgs the argument string to be passed to the java or scala command
- * the string must be either empty or start with a ' '.
*/
- private def serverCommand(vmArgs: String): String =
- vmCommand + vmArgs + " " + serverClass
+ private def serverCommand(vmArgs: Seq[String]): Seq[String] =
+ Seq(vmCommand) ++ vmArgs ++ Seq(serverClass) filterNot (_ == "")
/** Start a new server; returns true iff it succeeds */
private def startNewServer(vmArgs: String) {
- val cmd = serverCommand(vmArgs)
- info("[Executed command: " + cmd + "]")
- try {
- Runtime.getRuntime().exec(cmd)
-// val exitVal = proc.waitFor()
-// info("[Exit value: " + exitVal + "]")
- } catch {
- case ex: IOException =>
- fatal("Cannot start compilation daemon." +
- "\ntried command: " + cmd)
+ val cmd = serverCommand(vmArgs split " " toSeq)
+ info("[Executed command: %s]" format cmd)
+ try Process exec cmd catch {
+ case ex: IOException => fatal("Cannot start compilation daemon.\ntried command: %s" format cmd)
}
}
@@ -104,13 +95,13 @@ class CompileSocket {
def portFile(port: Int) = portsDir / File(port.toString)
/** Poll for a server port number; return -1 if none exists yet */
- private def pollPort(): Int =
- portsDir.list.toList match {
- case Nil => -1
- case p :: xs =>
- xs forall (_.delete())
- p.name.toInt
- }
+ private def pollPort(): Int = portsDir.list match {
+ case it if !it.hasNext => -1
+ case it =>
+ val ret = it.next.name.toInt
+ it foreach (_.delete())
+ ret
+ }
/** Get the port number to which a scala compile server is connected;
* If no server is running yet, then create one.
@@ -138,7 +129,7 @@ class CompileSocket {
val file = portFile(port)
val secret = new SecureRandom().nextInt.toString
- try file writeAll List(secret) catch {
+ try file writeAll secret catch {
case e @ (_: FileNotFoundException | _: SecurityException) =>
fatal("Cannot create file: %s".format(file.path))
}
@@ -148,38 +139,36 @@ class CompileSocket {
def deletePort(port: Int) = portFile(port).delete()
/** Get a socket connected to a daemon. If create is true, then
- * create a new daemon if necessary. Returns null if the connection
+ * create a new daemon if necessary. Returns None if the connection
* cannot be established.
*/
- def getOrCreateSocket(vmArgs: String, create: Boolean = true): Socket = {
- val nAttempts = 49 // try for about 5 seconds
- def getsock(attempts: Int): Socket =
- if (attempts == 0) {
- error("Unable to establish connection to compilation daemon")
- null
- } else {
- val port = if(create) getPort(vmArgs) else pollPort()
- if(port < 0) return null
- val hostAdr = InetAddress.getLocalHost()
- try {
- val result = new Socket(hostAdr, port)
- info("[Connected to compilation daemon at port " + port + "]")
- result
- } catch {
- case e: /*IO+Security*/Exception =>
- info(e.toString)
- info("[Connecting to compilation daemon at port " +
- port + " failed; re-trying...]")
+ def getOrCreateSocket(vmArgs: String, create: Boolean = true): Option[Socket] = {
+ // try for 5 seconds
+ val retryDelay = 100
+ val maxAttempts = (5 * 1000) / retryDelay
+
+ def getsock(attempts: Int): Option[Socket] = attempts match {
+ case 0 => error("Unable to establish connection to compilation daemon") ; None
+ case num =>
+ val port = if (create) getPort(vmArgs) else pollPort()
+ if (port < 0) return None
+
+ Socket(InetAddress.getLocalHost(), port).either match {
+ case Right(socket) =>
+ info("[Connected to compilation daemon at port %d]" format port)
+ Some(socket)
+ case Left(err) =>
+ info(err.toString)
+ info("[Connecting to compilation daemon at port %d failed; re-trying...]" format port)
if (attempts % 2 == 0)
- portFile(port).delete // 50% chance to stop trying on this port
-
- Thread.sleep(100) // delay before retrying
+ deletePort(port) // 50% chance to stop trying on this port
+ Thread sleep retryDelay // delay before retrying
getsock(attempts - 1)
}
- }
- getsock(nAttempts)
+ }
+ getsock(maxAttempts)
}
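
getOrCreateSocket now expresses connection setup as a bounded retry that returns Option[Socket] instead of null. The same pattern in isolation, assuming only the standard library (the Socket(...).either helper used above is a compiler-internal wrapper):

    import java.net.{ InetAddress, Socket }
    import scala.util.control.Exception.allCatch

    object RetrySketch {
      val retryDelay  = 100                        // ms between attempts
      val maxAttempts = (5 * 1000) / retryDelay    // try for about 5 seconds

      def connect(port: Int): Option[Socket] = {
        def attempt(remaining: Int): Option[Socket] =
          if (remaining == 0) None
          else allCatch.opt(new Socket(InetAddress.getLocalHost, port)) match {
            case some @ Some(_) => some
            case None =>
              Thread.sleep(retryDelay)             // delay before retrying
              attempt(remaining - 1)
          }
        attempt(maxAttempts)
      }
    }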
// XXX way past time for this to be central
@@ -187,24 +176,13 @@ class CompileSocket {
try { Some(x.toInt) }
catch { case _: NumberFormatException => None }
- def getSocket(serverAdr: String): Socket = {
- def fail = fatal("Malformed server address: %s; exiting" format serverAdr)
- (serverAdr indexOf ':') match {
- case -1 => fail
- case cpos =>
- val hostName: String = serverAdr take cpos
- parseInt(serverAdr drop (cpos + 1)) match {
- case Some(port) => getSocket(hostName, port)
- case _ => fail
- }
- }
- }
+ def getSocket(serverAdr: String): Socket = (
+ for ((name, portStr) <- splitWhere(serverAdr, _ == ':', true) ; port <- parseInt(portStr)) yield
+ getSocket(name, port)
+ ) getOrElse fatal("Malformed server address: %s; exiting" format serverAdr)
def getSocket(hostName: String, port: Int): Socket =
- try new Socket(hostName, port) catch {
- case e @ (_: IOException | _: SecurityException) =>
- fatal("Unable to establish connection to server %s:%d; exiting".format(hostName, port))
- }
+ Socket(hostName, port).opt getOrElse fatal("Unable to establish connection to server %s:%d; exiting".format(hostName, port))
def getPassword(port: Int): String = {
val ff = portFile(port)
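
The splitWhere/parseInt combination above comes from compiler-internal utilities; a rough standard-library equivalent of the "host:port" parsing looks like this:

    object AddressSketch {
      def parseInt(x: String): Option[Int] =
        try Some(x.toInt) catch { case _: NumberFormatException => None }

      // "host:port" -> Some((host, port)); anything else -> None
      def parseAddress(serverAdr: String): Option[(String, Int)] =
        (serverAdr indexOf ':') match {
          case -1   => None
          case cpos => parseInt(serverAdr drop (cpos + 1)) map (port => (serverAdr take cpos, port))
        }
    }

For example, parseAddress("localhost:3200") yields Some(("localhost", 3200)); a missing or non-numeric port yields None, which the caller turns into the fatal "Malformed server address" message.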
diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala
index 8465227133..c8c7482811 100644
--- a/src/compiler/scala/tools/nsc/CompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala
@@ -6,53 +6,40 @@
package scala.tools.nsc
-import Settings.Setting
import java.io.IOException
+import scala.collection.mutable.ListBuffer
+import io.File
/** A class representing command line info for scalac */
-class CompilerCommand(
- arguments: List[String],
- val settings: Settings,
- error: String => Unit,
- interactive: Boolean,
- shouldProcessArguments: Boolean)
-{
- def this(arguments: List[String], settings: Settings, error: String => Unit, interactive: Boolean) =
- this(arguments, settings, error, interactive, true)
+class CompilerCommand(arguments: List[String], val settings: Settings) {
+ def this(arguments: List[String], error: String => Unit) = this(arguments, new Settings(error))
+ type Setting = Settings#Setting
/** file extensions of files that the compiler can process */
lazy val fileEndings = Properties.fileEndings
- /** Private buffer for accumulating files to compile */
- private var fs: List[String] = List()
-
- /** Public list of files to compile */
- def files: List[String] = fs.reverse
-
/** The name of the command */
- val cmdName = "scalac"
+ def cmdName = "scalac"
+ private def isFsc = cmdName == "fsc"
private val helpSyntaxColumnWidth: Int =
- (settings.allSettings map (_.helpSyntax.length)) max
+ (settings.visibleSettings map (_.helpSyntax.length)) max
- private def format(s: String): String = {
- val buf = new StringBuilder(s)
- var i = s.length
- while (i < helpSyntaxColumnWidth) { buf.append(' '); i += 1 }
- buf.toString()
- }
+ private def format(s: String): String =
+ if (s.length >= helpSyntaxColumnWidth) s
+ else s + (" " * (helpSyntaxColumnWidth - s.length))
/** Creates a help message for a subset of options based on cond */
def createUsageMsg(label: String, cond: (Setting) => Boolean): String =
- settings.allSettings .
+ settings.visibleSettings .
filter(cond) .
map(s => format(s.helpSyntax) + " " + s.helpDescription) .
- mkString("Usage: %s <options> <source files>\n%s options include:\n " .
+ toList.sorted.mkString("Usage: %s <options> <source files>\n%s options include:\n " .
format(cmdName, label), "\n ", "\n")
/** Messages explaining usage and options */
def usageMsg = createUsageMsg("where possible standard", _.isStandard)
- def fscUsageMsg = createUsageMsg("where possible standard", ( st => st.isStandard || st.isFscSpecific ))
+ def fscUsageMsg = createUsageMsg("where possible standard", ( st => st.isStandard || st.name == "-shutdown"))
def xusageMsg = createUsageMsg("Possible advanced", _.isAdvanced)
def yusageMsg = createUsageMsg("Possible private", _.isPrivate)
@@ -60,8 +47,7 @@ class CompilerCommand(
// an informative message of some sort should be printed instead.
// (note: do not add "files.isEmpty" to this list)
val stopSettings = List[(() => Boolean, (Global) => String)](
- ((() => (settings.help.value _)() && (cmdName == "fsc")),
- fscUsageMsg + _.pluginOptionsHelp),
+ ((() => (settings.help.value _)() && isFsc), fscUsageMsg + _.pluginOptionsHelp),
(settings.help.value _, usageMsg + _.pluginOptionsHelp),
(settings.Xhelp.value _, _ => xusageMsg),
(settings.Yhelp.value _, _ => yusageMsg),
@@ -76,52 +62,33 @@ class CompilerCommand(
case None => ""
}
- /** Whether the command was processed okay */
- var ok = true
-
- /** Process the arguments and update the settings accordingly.
- This method is called only once, during initialization. */
- protected def processArguments() {
- // initialization
- var args = arguments
- def errorAndNotOk(msg: String) = { error(msg) ; ok = false }
-
- // given a @ argument expands it out
- def doExpand(x: String) =
- try { args = util.ArgumentsExpander.expandArg(x) ::: args.tail }
- catch { case ex: IOException => errorAndNotOk(ex.getMessage) }
-
- // true if it's a legit looking source file
- def isSourceFile(x: String) =
- (settings.script.value != "") ||
- (fileEndings exists (x endsWith _))
-
- // given an option for scalac finds out what it is
- def doOption(x: String): Unit = {
- if (interactive)
- return errorAndNotOk("no options can be given in interactive mode")
-
- val argsLeft = settings.parseParams(args)
- if (args != argsLeft) args = argsLeft
- else errorAndNotOk("bad option: '" + x + "'")
- }
+ /**
+ * Expands all arguments starting with @ to the contents of the
+ * file named by each argument.
+ */
+ def expandArg(arg: String): List[String] = {
+ def stripComment(s: String) = s takeWhile (_ != '#')
+ val file = File(arg stripPrefix "@")
+ if (!file.exists)
+ throw new java.io.FileNotFoundException("argument file %s could not be found" format file.name)
+
+ settings splitParams (file.lines() map stripComment mkString " ")
+ }
+
+ // override this if you don't want arguments processed here
+ def shouldProcessArguments: Boolean = true
- // cycle through args until empty or error
- while (!args.isEmpty && ok) args.head match {
- case x if x startsWith "@" => doExpand(x)
- case x if x startsWith "-" => doOption(x)
- case x if isSourceFile(x) => fs = x :: fs ; args = args.tail
- case "" => args = args.tail // quick fix [martin: for what?]
- case x => errorAndNotOk("don't know what to do with " + x)
+ def processArguments: (Boolean, List[String]) = {
+ // expand out @filename to the contents of that filename
+ val expandedArguments = arguments flatMap {
+ case x if x startsWith "@" => expandArg(x)
+ case x => List(x)
}
- ok &&= settings.checkDependencies
+ settings.processArguments(expandedArguments, true)
}
- // CompilerCommand needs processArguments called at the end of its constructor,
- // as does its subclass GenericRunnerCommand, but it cannot be called twice as it
- // accumulates arguments. The fact that it's called from within the constructors
- // makes initialization order an obstacle to simplicity.
- if (shouldProcessArguments)
- processArguments()
+ val (ok, files) =
+ if (shouldProcessArguments) processArguments
+ else (true, Nil)
}
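
expandArg and processArguments above rely on the compiler's io.File and settings.splitParams. A self-contained approximation of the @file expansion, assuming whitespace-separated options and scala.io.Source:

    import scala.io.Source

    object ArgFileSketch {
      private def stripComment(s: String) = s takeWhile (_ != '#')

      /** Expand an argument of the form @file into the options listed in that file. */
      def expandArg(arg: String): List[String] = {
        val src = Source.fromFile(arg stripPrefix "@")
        try src.getLines().map(stripComment).mkString(" ").split("\\s+").filter(_.nonEmpty).toList
        finally src.close()
      }

      def expandAll(arguments: List[String]): List[String] =
        arguments flatMap {
          case x if x startsWith "@" => expandArg(x)
          case x                     => List(x)
        }
    }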
diff --git a/src/compiler/scala/tools/nsc/EvalLoop.scala b/src/compiler/scala/tools/nsc/EvalLoop.scala
index 46579801bc..388bcf8ccc 100644
--- a/src/compiler/scala/tools/nsc/EvalLoop.scala
+++ b/src/compiler/scala/tools/nsc/EvalLoop.scala
@@ -6,22 +6,21 @@
package scala.tools.nsc
-trait EvalLoop {
+import annotation.tailrec
+import java.io.EOFException
+trait EvalLoop {
def prompt: String
def loop(action: (String) => Unit) {
- Console.print(prompt)
- try {
- val line = Console.readLine
- if (line.length() > 0) {
+ @tailrec def inner() {
+ Console.print(prompt)
+ val line = try Console.readLine catch { case _: EOFException => null }
+ if (line != null && line != "") {
action(line)
- loop(action)
+ inner()
}
}
- catch {
- case _: java.io.EOFException => //nop
- }
+ inner()
}
-
}
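
The rewritten loop replaces exception-driven recursion with an annotated tail-recursive inner function. A minimal sketch of that shape, using scala.io.StdIn (which returns null at end of input) in place of the deprecated Console.readLine:

    import scala.annotation.tailrec

    object EvalLoopSketch {
      /** Prompt, read, act, repeat; stop on end of input or an empty line. */
      def loop(prompt: String)(action: String => Unit): Unit = {
        @tailrec def inner(): Unit = {
          Console.print(prompt)
          val line = scala.io.StdIn.readLine()   // null at end of input
          if (line != null && line != "") {
            action(line)
            inner()
          }
        }
        inner()
      }
    }

    // e.g. EvalLoopSketch.loop("> ")(println)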
diff --git a/src/compiler/scala/tools/nsc/FatalError.scala b/src/compiler/scala/tools/nsc/FatalError.scala
index 7bc1bc52d8..f9a801e611 100644
--- a/src/compiler/scala/tools/nsc/FatalError.scala
+++ b/src/compiler/scala/tools/nsc/FatalError.scala
@@ -6,7 +6,9 @@
package scala.tools.nsc
-case class FatalError(msg: String) extends Exception(msg)
+import scala.util.control.ControlThrowable
+
+case class FatalError(msg: String) extends Throwable(msg)
class MissingRequirementError(val req: String) extends FatalError(req + " not found.")
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
index bde623b5d7..f3ac556d4f 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
@@ -9,71 +9,54 @@ package scala.tools.nsc
/** A command for ScriptRunner */
class GenericRunnerCommand(
- allargs: List[String],
- override val settings: GenericRunnerSettings,
- error: String => Unit)
-extends CompilerCommand(allargs, settings, error, false, false)
-{
- def this(allargs: List[String], error: String=>Unit) =
- this(allargs, new GenericRunnerSettings(error), error)
+ args: List[String],
+ override val settings: GenericRunnerSettings)
+extends CompilerCommand(args, settings) {
- def this(allargs: List[String]) =
- this(allargs, str => Console.println("Error: " + str))
+ def this(args: List[String], error: String => Unit) =
+ this(args, new GenericRunnerSettings(error))
+
+ def this(args: List[String]) =
+ this(args, str => Console.println("Error: " + str))
/** name of the associated compiler command */
override val cmdName = "scala"
val compCmdName = "scalac"
- /** What to run. If it is None, then the interpreter should be started */
- var thingToRun: Option[String] = None
+ // change CompilerCommand behavior
+ override def shouldProcessArguments: Boolean = false
- /** Arguments to pass to the object or script to run */
- var arguments: List[String] = Nil
+ /** thingToRun: What to run. If it is None, then the interpreter should be started
+ * arguments: Arguments to pass to the object or script to run
+ */
+ val (thingToRun, arguments) = settings.processArguments(args, false)._2 match {
+ case Nil => (None, Nil)
+ case hd :: tl => (Some(hd), tl)
+ }
- override protected def processArguments() {
- var args = allargs
+ override def usageMsg = """
+%s [ <option> ]... [<torun> <arguments>]
- while (!args.isEmpty && ok && args.head.startsWith("-")) {
- val args0 = args
- args = settings parseParams args
- if (args eq args0) {
- error("bad option: '" + args.head + "'")
- ok = false
- }
- }
+All options to %s are allowed. See %s -help.
- if (!args.isEmpty) {
- thingToRun = Some(args.head)
- arguments = args.tail
- }
- }
+<torun>, if present, is an object or script file to run.
+If no <torun> is present, run an interactive shell.
- // we can safely call processArguments since we passed the superclass shouldProcessArguments=false
- processArguments()
-
- override def usageMsg = {
- cmdName + " [ <option> ]... [<torun> <arguments>]\n" +
- "\n" +
- "All options to "+compCmdName+" are allowed. See "+compCmdName+" -help.\n" +
- "\n" +
- "<torun>, if present, is an object or script file to run.\n" +
- "If no <torun> is present, run an interactive shell.\n" +
- "\n" +
- "Option -howtorun allows explicitly specifying how to run <torun>:\n" +
- " script: it is a script file\n" +
- " object: it is an object name\n" +
- " guess: (the default) try to guess\n" +
- "\n" +
- "Option -i requests that a file be pre-loaded. It is only\n" +
- "meaningful for interactive shells.\n" +
- "\n" +
- "Option -e requests that its argument be executed as Scala code.\n" +
- "\n" +
- "Option -savecompiled requests that the compiled script be saved\n" +
- "for future use.\n" +
- "\n" +
- "Option -nocompdaemon requests that the fsc offline compiler not be used.\n" +
- "\n" +
- "Option -Dproperty=value sets a Java system property.\n"
- }
+Option -howtorun allows explicitly specifying how to run <torun>:
+ script: it is a script file
+ object: it is an object name
+ guess: (the default) try to guess
+
+Option -i requests that a file be pre-loaded. It is only
+meaningful for interactive shells.
+
+Option -e requests that its argument be executed as Scala code.
+
+Option -savecompiled requests that the compiled script be saved
+for future use.
+
+Option -nocompdaemon requests that the fsc offline compiler not be used.
+
+Option -Dproperty=value sets a Java system property.
+""".format(cmdName, compCmdName, compCmdName)
}
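
Two small techniques in this hunk are worth isolating: the residual arguments from processArguments are destructured into the thing to run plus its arguments, and the usage text becomes a single formatted multi-line string. A sketch with placeholder command names (stripMargin used here for readability):

    object RunnerArgsSketch {
      // First residual argument is the thing to run, the rest are its arguments.
      def split(residualArgs: List[String]): (Option[String], List[String]) =
        residualArgs match {
          case Nil      => (None, Nil)
          case hd :: tl => (Some(hd), tl)
        }

      // One formatted multi-line string instead of string concatenation.
      def usageMsg(cmdName: String, compCmdName: String): String = """
        |%s [ <option> ]... [<torun> <arguments>]
        |
        |All options to %s are allowed. See %s -help.
        |""".stripMargin.format(cmdName, compCmdName, compCmdName)
    }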
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
index 76cb8e608b..6697146a5a 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
@@ -38,6 +38,4 @@ extends Settings(error) {
BooleanSetting(
"-nocompdaemon",
"do not use the fsc compilation daemon")
-
- val defines = DefinesSetting
}
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 1128150cb8..15da6f6fec 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -6,30 +6,29 @@
package scala.tools.nsc
-import java.io.{File, FileOutputStream, PrintWriter}
-import java.io.{IOException, FileNotFoundException}
-import java.nio.charset._
+import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException }
+import java.nio.charset.{ Charset, IllegalCharsetNameException, UnsupportedCharsetException }
import compat.Platform.currentTime
-import scala.tools.nsc.io.{SourceReader, AbstractFile, Path}
-import scala.tools.nsc.reporters._
-import scala.tools.nsc.util.{ClassPath, MsilClassPath, JavaClassPath, SourceFile, BatchSourceFile, OffsetPosition, RangePosition}
-import scala.collection.mutable.{HashSet, HashMap, ListBuffer}
+import io.{ SourceReader, AbstractFile, Path }
+import reporters.{ Reporter, ConsoleReporter }
+import util.{ ClassPath, SourceFile, Statistics, BatchSourceFile }
+import collection.mutable.{ HashSet, HashMap, ListBuffer }
+import reflect.generic.{ PickleBuffer }
-import symtab._
-import symtab.classfile.{PickleBuffer, Pickler}
-import dependencies.{DependencyAnalysis}
-import util.Statistics
+import symtab.{ Flags, SymbolTable, SymbolLoaders }
+import symtab.classfile.Pickler
+import dependencies.DependencyAnalysis
import plugins.Plugins
import ast._
import ast.parser._
import typechecker._
import transform._
-import backend.icode.{ICodes, GenICode, Checkers}
-import backend.ScalaPrimitives
+
+import backend.icode.{ ICodes, GenICode, Checkers }
+import backend.{ ScalaPrimitives, Platform, MSILPlatform, JavaPlatform }
import backend.jvm.GenJVM
-import backend.msil.GenMSIL
-import backend.opt.{Inliners, ClosureElimination, DeadCodeElimination}
+import backend.opt.{ Inliners, ClosureElimination, DeadCodeElimination }
import backend.icode.analysis._
class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
@@ -46,7 +45,16 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
def this(settings: Settings) =
this(settings, new ConsoleReporter(settings))
- //def this() = this(new Settings, new ConsoleReporter)
+ // platform specific elements
+
+ type ThisPlatform = Platform[_] { val global: Global.this.type }
+
+ lazy val platform: ThisPlatform =
+ if (forMSIL) new { val global: Global.this.type = Global.this } with MSILPlatform
+ else new { val global: Global.this.type = Global.this } with JavaPlatform
+
+ def classPath: ClassPath[_] = platform.classPath
+ def rootLoader: LazyType = platform.rootLoader
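
The platform indirection replaces forMSIL branches spread through Global with a single object chosen once; the classpath, the root loader, and the backend phases then become platform questions. A much-reduced model of that design (names and phase lists are illustrative, and the path-dependent global/early-definition plumbing of the real code is omitted):

    object PlatformSketch {
      trait Platform {
        def classPathString: String
        def platformPhases: List[String]
      }
      object JavaPlatform extends Platform {
        def classPathString = "java-classpath"
        def platformPhases  = List("flatten", "liftcode", "jvm")
      }
      object MSILPlatform extends Platform {
        def classPathString = "msil-assemblies"
        def platformPhases  = List("msil")
      }

      def platform(forMSIL: Boolean): Platform =
        if (forMSIL) MSILPlatform else JavaPlatform
    }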
// sub-components --------------------------------------------------
@@ -117,7 +125,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
// ------------ Hooks for interactive mode-------------------------
- /** Called every time an AST node is succesfully typedchecked in typerPhase.
+ /** Called every time an AST node is successfully typechecked in typerPhase.
*/
def signalDone(context: analyzer.Context, old: Tree, result: Tree) {}
@@ -127,10 +135,11 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
// ------------------ Reporting -------------------------------------
- import util.NoPosition
def error(msg: String) = reporter.error(NoPosition, msg)
- def warning(msg: String) = reporter.warning(NoPosition, msg)
- def inform(msg: String) = Console.err.println(msg)
+ def warning(msg: String) =
+ if (settings.Ywarnfatal.value) reporter.error(NoPosition, msg)
+ else reporter.warning(NoPosition, msg)
+ def inform(msg: String) = reporter.info(NoPosition, msg, true)
def inform[T](msg: String, value: T): T = { inform(msg+value); value }
//reporter.info(null, msg, true)
@@ -145,29 +154,26 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
if (settings.log contains phase.name) inform("[log " + phase + "] " + msg)
}
- class ErrorWithPosition(val pos: Int, val error: Throwable) extends Error
+ class ThrowableWithPosition(val pos: Int, val error: Throwable) extends Throwable
- def tryWith[T](pos: Int, body: => T): T = try {
- body
- } catch {
- case e : ErrorWithPosition => throw e
- case te: TypeError => throw te
- case e : Error => throw new ErrorWithPosition(pos, e)
- case e : RuntimeException => throw new ErrorWithPosition(pos, e)
- }
+ def tryWith[T](pos: Int, body: => T): T =
+ try body
+ catch {
+ case e : ThrowableWithPosition => throw e
+ case te: TypeError => throw te
+ case e : RuntimeException => throw new ThrowableWithPosition(pos, e)
+ }
- def catchWith[T](source : SourceFile, body : => T) : T = try {
- body
- } catch {
- case e : ErrorWithPosition =>
- logError("POS: " + source.dbg(e.pos), e)
- throw e.error
- }
+ def catchWith[T](source : SourceFile, body : => T) : T =
+ try body
+ catch {
+ case e : ThrowableWithPosition =>
+ logError("POS: " + source.dbg(e.pos), e)
+ throw e.error
+ }
def logError(msg: String, t: Throwable): Unit = ()
- def abort(msg: String) = throw new Error(msg)
-
// ------------ File interface -----------------------------------------
private val reader: SourceReader = {
@@ -211,21 +217,8 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
}
}
- lazy val classPath = {
- ClassPath.XO = settings.XO.value
- if (forMSIL)
- new MsilClassPath(settings.assemextdirs.value, settings.assemrefs.value,
- settings.sourcepath.value)
- else
- new JavaClassPath(settings.bootclasspath.value, settings.extdirs.value,
- settings.classpath.value, settings.sourcepath.value,
- settings.Xcodebase.value)
- }
-
- if (settings.verbose.value) {
- inform("[Classpath = " + classPath + "]")
- if (forMSIL) inform("[AssemRefs = " + settings.assemrefs.value + "]")
- }
+ if (settings.verbose.value || settings.Ylogcp.value)
+ inform("[Classpath = " + classPath.asClasspathString + "]")
def getSourceFile(f: AbstractFile): BatchSourceFile =
new BatchSourceFile(f, reader.read(f))
@@ -241,10 +234,6 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
val global: Global.this.type = Global.this
}
- def rootLoader: LazyType =
- if (forMSIL) new loaders.NamespaceLoader(classPath.asInstanceOf[MsilClassPath])
- else new loaders.JavaPackageLoader(classPath.asInstanceOf[JavaClassPath])
-
// ------------ Phases -------------------------------------------}
var globalPhase: Phase = NoPhase
@@ -473,13 +462,6 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
val runsRightAfter = None
} with DependencyAnalysis
- // phaseName = "msil"
- object genMSIL extends {
- val global: Global.this.type = Global.this
- val runsAfter = List[String]("dce")
- val runsRightAfter = None
- } with GenMSIL
-
// phaseName = "terminal"
object terminal extends {
val global: Global.this.type = Global.this
@@ -519,57 +501,45 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
/* Add the internal compiler phases to the phases set
*/
protected def computeInternalPhases() {
- phasesSet += syntaxAnalyzer // The parser
- phasesSet += analyzer.namerFactory // note: types are there because otherwise
- phasesSet += analyzer.packageObjects // consistency check after refchecks would fail.
+ phasesSet += syntaxAnalyzer // The parser
+ phasesSet += analyzer.namerFactory // note: types are there because otherwise
+ phasesSet += analyzer.packageObjects // consistency check after refchecks would fail.
phasesSet += analyzer.typerFactory
- phasesSet += superAccessors // add super accessors
- phasesSet += pickler // serialize symbol tables
- phasesSet += refchecks // perform reference and override checking, translate nested objects
-
-// if (false && settings.Xexperimental.value)
-// phasesSet += devirtualize // Desugar virtual classes4
+ phasesSet += superAccessors // add super accessors
+ phasesSet += pickler // serialize symbol tables
+ phasesSet += refchecks // perform reference and override checking, translate nested objects
+ // phasesSet += devirtualize // Desugar virtual classes
- phasesSet += uncurry // uncurry, translate function values to anonymous classes
- phasesSet += tailCalls // replace tail calls by jumps
+ phasesSet += uncurry // uncurry, translate function values to anonymous classes
+ phasesSet += tailCalls // replace tail calls by jumps
if (settings.specialize.value)
phasesSet += specializeTypes
- phasesSet += explicitOuter // replace C.this by explicit outer pointers, eliminate pattern matching
- phasesSet += erasure // erase generic types to Java 1.4 types, add interfaces for traits
- phasesSet += lazyVals //
- phasesSet += lambdaLift // move nested functions to top level
-// if (forJVM && settings.Xdetach.value)
-// phasesSet += detach // convert detached closures
- phasesSet += constructors // move field definitions into constructors
- phasesSet += mixer // do mixin composition
- phasesSet += cleanup // some platform-specific cleanups
- phasesSet += genicode // generate portable intermediate code
- phasesSet += inliner // optimization: do inlining
- phasesSet += closureElimination // optimization: get rid of uncalled closures
- phasesSet += deadCode // optimization: get rid of dead cpde
- phasesSet += terminal // The last phase in the compiler chain
-
- if (! forMSIL) {
- phasesSet += flatten // get rid of inner classes
- }
- if (forJVM) {
- phasesSet += liftcode // generate reified trees
- phasesSet += genJVM // generate .class files
- if (settings.make.value != "all")
- phasesSet += dependencyAnalysis
- }
- if (forMSIL) {
- phasesSet += genMSIL // generate .msil files
- }
+ phasesSet += explicitOuter // replace C.this by explicit outer pointers, eliminate pattern matching
+ phasesSet += erasure // erase types, add interfaces for traits
+ phasesSet += lazyVals
+ phasesSet += lambdaLift // move nested functions to top level
+ // if (forJVM && settings.Xdetach.value)
+ // phasesSet += detach // convert detached closures
+
+ phasesSet += constructors // move field definitions into constructors
+ phasesSet += mixer // do mixin composition
+ phasesSet += cleanup // some platform-specific cleanups
+ phasesSet += genicode // generate portable intermediate code
+ phasesSet += inliner // optimization: do inlining
+ phasesSet += closureElimination // optimization: get rid of uncalled closures
+ phasesSet += deadCode // optimization: get rid of dead code
+ phasesSet += terminal // The last phase in the compiler chain
}
+ protected def computePlatformPhases() = platform.platformPhases foreach (phasesSet += _)
/* Helper method for sequencing the phase assembly
*/
private def computePhaseDescriptors: List[SubComponent] = {
- computeInternalPhases() // Global.scala
- computePluginPhases() // plugins/Plugins.scala
- buildCompilerFromPhasesSet() // PhaseAssembly.scala
+ computeInternalPhases() // Global.scala
+ computePlatformPhases() // backend/Platform.scala
+ computePluginPhases() // plugins/Plugins.scala
+ buildCompilerFromPhasesSet() // PhaseAssembly.scala
}
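
With the platform split in place, phase assembly is additive: internal phases, platform phases, and plugin phases all land in one mutable set before the dependency-driven ordering runs. A toy version over plain phase names:

    import scala.collection.mutable

    object PhaseAssemblySketch {
      // The real set holds SubComponent objects; strings keep the sketch small.
      val phasesSet = mutable.HashSet[String]()

      def computeInternalPhases(): Unit =
        phasesSet ++= List("parser", "namer", "typer", "erasure", "terminal")

      def computePlatformPhases(platformPhases: List[String]): Unit =
        platformPhases foreach (phasesSet += _)

      def assemble(platformPhases: List[String]): List[String] = {
        computeInternalPhases()
        computePlatformPhases(platformPhases)
        phasesSet.toList.sorted   // stand-in for the ordering done in PhaseAssembly.scala
      }
    }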
/* The phase descriptor list */
@@ -578,15 +548,16 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
/* The set of phase objects that is the basis for the compiler phase chain */
protected val phasesSet : HashSet[SubComponent] = new HashSet[SubComponent]
- /** A description of the phases that will run */
- def phaseDescriptions: String = {
+ /** The names of the phases. */
+ lazy val phaseNames = {
new Run // force some initialization
- val messages =
- for (phase <- phaseDescriptors)
- yield phase.phaseName //todo: + " - " + phase.description
- messages.mkString("\n")
+ phaseDescriptors map (_.phaseName)
}
+ /** A description of the phases that will run */
+ def phaseDescriptions: String =
+ phaseNames mkString "\n" // todo: + " - " + phase.description
+
// ----------- Runs ---------------------------------------
private var curRun: Run = null
@@ -613,7 +584,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
curRun = this
//Console.println("starting run: " + id)
- // Can not take the phaseDescriptors.head even though its the syntaxAnalyser, this will implicitly
+ // Can not take the phaseDescriptors.head even though its the syntaxAnalyzer, this will implicitly
// call definitions.init which uses phase and needs it to be != NoPhase
val phase1 = syntaxAnalyzer.newPhase(NoPhase)
phase = phase1
@@ -689,6 +660,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
val typerPhase = phaseNamed("typer")
val picklerPhase = phaseNamed("pickler")
val refchecksPhase = phaseNamed("refchecks")
+ val uncurryPhase = phaseNamed("uncurry")
val explicitOuterPhase = phaseNamed("explicitouter")
val erasurePhase = phaseNamed("erasure")
@@ -704,12 +676,12 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
// ----------- Units and top-level classes and objects --------
private var unitbuf = new ListBuffer[CompilationUnit]
- var compiledFiles = new HashSet[AbstractFile]
+ var compiledFiles = new HashSet[String]
/** add unit to be compiled in this run */
private def addUnit(unit: CompilationUnit) {
unitbuf += unit
- compiledFiles += unit.source.file
+ compiledFiles += unit.source.file.path
}
/* An iterator returning all the units being compiled in this run */
@@ -729,11 +701,17 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
else if (sym.isModuleClass) compiles(sym.sourceModule)
else false
+ /** Is this run allowed to redefine the given symbol? Usually this is true
+ * if the run does not already compile `sym`, but for interactive mode
+ * we have a more liberal interpretation.
+ */
+ def canRedefine(sym: Symbol) = !compiles(sym)
+
// --------------- Compilation methods ----------------------------
/** Compile list of source files */
def compileSources(_sources: List[SourceFile]) {
- val depSources = dependencyAnalysis.filter(_sources.removeDuplicates) // bug #1268, scalac confused by duplicated filenames
+ val depSources = dependencyAnalysis.filter(_sources.distinct) // bug #1268, scalac confused by duplicated filenames
val sources = scalaObjectFirst(depSources)
if (reporter.hasErrors)
return // there is a problem already, e.g. a
@@ -747,7 +725,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
val startTime = currentTime
phase = globalPhase
globalPhase.run
- if (settings.print contains globalPhase.name)
+ if (settings.Xprint contains globalPhase.name)
if (settings.writeICode.value && globalPhase.id >= icodePhase.id) writeICode()
else if (settings.Xshowtrees.value) nodePrinters.printAll()
else printAllUnits()
@@ -763,9 +741,9 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
phase = globalPhase
if (globalPhase.id >= icodePhase.id) icodeChecker.checkICodes
else checker.checkTrees
- }
- else if (!settings.check.doAllPhases) {
- warning("It is not possible to check the result of the "+globalPhase.name+" phase")
+ }
+ else if (!settings.check.doAllPhases) {
+ warning("It is not possible to check the result of the "+globalPhase.name+" phase")
}
}
if (settings.Ystatistics.value) statistics.print(phase)
@@ -825,22 +803,15 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
/** Compile list of files given by their names */
def compile(filenames: List[String]) {
- try {
- val scriptMain = settings.script.value
- // Are we compiling a script?
- if (scriptMain != "") {
- if(filenames.length != 1)
- error("can only compile one script at a time")
- val scriptFile =
- ScriptRunner.wrappedScript(scriptMain, filenames.head, getSourceFile)
- compileSources(List(scriptFile))
- // No we are compiling regular source files
- } else {
- compileSources(filenames map getSourceFile)
- }
- } catch {
- case ex: IOException => error(ex.getMessage())
+ val scriptMain = settings.script.value
+ def sources: List[SourceFile] = scriptMain match {
+ case "" => filenames map getSourceFile
+ case main if filenames.length == 1 => List(ScriptRunner.wrappedScript(main, filenames.head, getSourceFile))
+ case _ => error("can only compile one script at a time") ; Nil
}
+
+ try compileSources(sources)
+ catch { case ex: IOException => error(ex.getMessage()) }
}
/** Compile abstract file until `globalPhase`, but at least
@@ -852,7 +823,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
" was found\n(This file cannot be loaded as a source file)"
inform(msg)
throw new FatalError(msg)
- } else if (!(compiledFiles contains file)) {
+ } else if (!(compiledFiles contains file.path)) {
compileLate(new CompilationUnit(getSourceFile(file)))
}
}
@@ -874,6 +845,20 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
refreshProgress
}
+ /**
+ * Attempt to locate a source file providing the given name as a top-level
+ * definition in the given context, and add it to the run via compileLate
+ * if found.
+ */
+ def compileSourceFor(context : analyzer.Context, name : Name) = false
+
+ /**
+ * Attempt to locate a source file providing the given name as a top-level
+ * definition with the given prefix, and add it to the run via compileLate
+ * if found.
+ */
+ def compileSourceFor(qual : Tree, name : Name) = false
+
/** Reset package class to state at typer (not sure what this
* is needed for?)
*/
@@ -924,7 +909,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
def getFile(clazz: Symbol, suffix: String): File = {
val outdirname = settings.outputDirs.outputDirFor(clazz.sourceFile)
var outdir = new File(if (outdirname.path == "") "." else outdirname.path)
- val filename = clazz.fullNameString('.')
+ val filename = clazz.fullName
var start = 0
var end = filename.indexOf('.', start)
while (end >= start) {
@@ -938,7 +923,7 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
private def writeICode() {
val printer = new icodes.TextPrinter(null, icodes.linearizer)
- icodes.classes.valuesIterator.foreach((cls) => {
+ icodes.classes.values.foreach((cls) => {
val suffix = if (cls.symbol hasFlag Flags.MODULE) "$.icode" else ".icode"
var file = getFile(cls.symbol, suffix)
// if (file.exists())
diff --git a/src/compiler/scala/tools/nsc/Interpreter.scala b/src/compiler/scala/tools/nsc/Interpreter.scala
index b2c5bc2415..0cc0f65640 100644
--- a/src/compiler/scala/tools/nsc/Interpreter.scala
+++ b/src/compiler/scala/tools/nsc/Interpreter.scala
@@ -6,15 +6,20 @@
package scala.tools.nsc
+import Predef.{ println => _, _ }
import java.io.{ File, PrintWriter, StringWriter, Writer }
+import File.pathSeparator
import java.lang.{ Class, ClassLoader }
import java.net.{ MalformedURLException, URL }
import java.lang.reflect
import reflect.InvocationTargetException
-import scala.collection.immutable.ListSet
+import scala.PartialFunction.{ cond, condOpt }
+import scala.tools.util.PathResolver
+import scala.reflect.Manifest
import scala.collection.mutable
-import scala.collection.mutable.{ ListBuffer, HashSet, ArrayBuffer }
+import scala.collection.mutable.{ ListBuffer, HashSet, HashMap, ArrayBuffer }
+import scala.collection.immutable.Set
import scala.tools.nsc.util.ScalaClassLoader
import ScalaClassLoader.URLClassLoader
import scala.util.control.Exception.{ Catcher, catching, ultimately, unwrapping }
@@ -23,7 +28,7 @@ import io.{ PlainFile, VirtualDirectory }
import reporters.{ ConsoleReporter, Reporter }
import symtab.{ Flags, Names }
import util.{ SourceFile, BatchSourceFile, ClassPath }
-import scala.util.NameTransformer
+import scala.reflect.NameTransformer
import scala.tools.nsc.{ InterpreterResults => IR }
import interpreter._
import Interpreter._
@@ -68,28 +73,77 @@ import Interpreter._
* @author Moez A. Abdel-Gawad
* @author Lex Spoon
*/
-class Interpreter(val settings: Settings, out: PrintWriter)
-{
+class Interpreter(val settings: Settings, out: PrintWriter) {
+ /** construct an interpreter that reports to Console */
+ def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true))
+ def this() = this(new Settings())
+
/** directory to save .class files to */
val virtualDirectory = new VirtualDirectory("(memory)", None)
- /** the compiler to compile expressions with */
- val compiler: Global = newCompiler(settings, reporter)
+ /** reporter */
+ object reporter extends ConsoleReporter(settings, null, out) {
+ override def printMessage(msg: String) {
+ out println clean(msg)
+ out.flush()
+ }
+ }
+
+ /** We're going to go to some trouble to initialize the compiler asynchronously.
+ * It's critical that nothing call into it until it's been initialized or we will
+ * run into unrecoverable issues, but the perceived repl startup time goes
+ * through the roof if we wait for it. So we initialize it with a future and
+ * use a lazy val to ensure that any attempt to use the compiler object waits
+ * on the future.
+ */
+ private val _compiler: Global = newCompiler(settings, reporter)
+ private def _initialize(): Boolean = {
+ val source = """
+ |// this is assembled to force the loading of approximately the
+ |// classes which will be loaded on the first expression anyway.
+ |class $repl_$init {
+ | val x = "abc".reverse.length + (5 max 5)
+ | scala.runtime.ScalaRunTime.stringOf(x)
+ |}
+ |""".stripMargin
+
+ val run = new _compiler.Run()
+ run compileSources List(new BatchSourceFile("<init>", source))
+ if (settings.debug.value) {
+ out println "Repl compiler initialized."
+ out.flush()
+ }
+ true
+ }
+
+ // set up initialization future
+ private var _isInitialized: () => Boolean = () => false
+ def initialize() = synchronized {
+ if (!_isInitialized())
+ _isInitialized = scala.concurrent.ops future _initialize()
+ }
+
+ /** the public compiler; access goes through the initialization future */
+ lazy val compiler: Global = {
+ initialize()
+ _isInitialized() // blocks until it is
+
+ _compiler
+ }
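
The comment above spells out the technique: start compiler construction on a background thread, then gate every use behind a lazy val that blocks until it has finished. The diff uses scala.concurrent.ops; the same idea with the later Future API, started eagerly here where the interpreter defers it to initialize(), and with the warm-up compilation reduced to a placeholder:

    import scala.concurrent.{ Await, Future }
    import scala.concurrent.duration.Duration
    import scala.concurrent.ExecutionContext.Implicits.global

    object LazyInitSketch {
      // Stand-in for the expensive work; the real code compiles a small
      // warm-up source to pre-load the classes the first line would need.
      private def expensiveInit(): String = { Thread.sleep(50); "compiler ready" }

      // Kick the work off in the background ...
      private val pending: Future[String] = Future(expensiveInit())

      // ... and make every caller go through a lazy val that waits for it.
      lazy val compiler: String = Await.result(pending, Duration.Inf)
    }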
import compiler.{ Traverser, CompilationUnit, Symbol, Name, Type }
import compiler.{
Tree, TermTree, ValOrDefDef, ValDef, DefDef, Assign, ClassDef,
ModuleDef, Ident, Select, TypeDef, Import, MemberDef, DocDef,
- EmptyTree }
- import compiler.{ nme, newTermName }
+ ImportSelector, EmptyTree, NoType }
+ import compiler.{ nme, newTermName, newTypeName }
import nme.{
INTERPRETER_VAR_PREFIX, INTERPRETER_SYNTHVAR_PREFIX, INTERPRETER_LINE_PREFIX,
INTERPRETER_IMPORT_WRAPPER, INTERPRETER_WRAPPER_SUFFIX, USCOREkw
}
- /** construct an interpreter that reports to Console */
- def this(settings: Settings) =
- this(settings, new NewLinePrintWriter(new ConsoleWriter, true))
+ import compiler.definitions
+ import definitions.{ EmptyPackage, getMember }
/** whether to print out result lines */
private[nsc] var printResults: Boolean = true
@@ -118,11 +172,15 @@ class Interpreter(val settings: Settings, out: PrintWriter)
/** interpreter settings */
lazy val isettings = new InterpreterSettings(this)
- object reporter extends ConsoleReporter(settings, null, out) {
- override def printMessage(msg: String) {
- out.print(clean(msg) + "\n"); out.flush()
+ /** Heuristically strip interpreter wrapper prefixes
+ * from an interpreter output string.
+ */
+ def stripWrapperGunk(str: String): String =
+ if (isettings.unwrapStrings) {
+ val wrapregex = """(line[0-9]+\$object[$.])?(\$iw[$.])*"""
+ str.replaceAll(wrapregex, "")
}
- }
+ else str
/** Instantiate a compiler. Subclasses can override this to
* change the compiler class used by this interpreter. */
@@ -132,17 +190,7 @@ class Interpreter(val settings: Settings, out: PrintWriter)
}
/** the compiler's classpath, as URL's */
- val compilerClasspath: List[URL] = {
- def parseURL(s: String): Option[URL] =
- catching(classOf[MalformedURLException]) opt new URL(s)
-
- val classpathPart =
- ClassPath.expandPath(compiler.settings.classpath.value).map(s => new File(s).toURI.toURL)
- val codebasePart =
- (compiler.settings.Xcodebase.value.split(" ")).toList flatMap parseURL
-
- classpathPart ::: codebasePart
- }
+ lazy val compilerClasspath: List[URL] = new PathResolver(settings) asURLs
/* A single class loader is used for all commands interpreted by this Interpreter.
It would also be possible to create a new class loader for each command
@@ -157,7 +205,14 @@ class Interpreter(val settings: Settings, out: PrintWriter)
shadow the old ones, and old code objects refer to the old
definitions.
*/
- private var classLoader: AbstractFileClassLoader = makeClassLoader()
+ private var _classLoader: AbstractFileClassLoader = null
+ def resetClassLoader() = _classLoader = makeClassLoader()
+ def classLoader: AbstractFileClassLoader = {
+ if (_classLoader == null)
+ resetClassLoader()
+
+ _classLoader
+ }
private def makeClassLoader(): AbstractFileClassLoader = {
val parent =
if (parentClassLoader == null) ScalaClassLoader fromURLs compilerClasspath
@@ -170,17 +225,57 @@ class Interpreter(val settings: Settings, out: PrintWriter)
c.getMethod(name, classOf[Object])
protected def parentClassLoader: ClassLoader = this.getClass.getClassLoader()
+ def getInterpreterClassLoader() = classLoader
// Set the current Java "context" class loader to this interpreter's class loader
def setContextClassLoader() = classLoader.setAsContext()
/** the previous requests this interpreter has processed */
private val prevRequests = new ArrayBuffer[Request]()
- val prevImports = new ListBuffer[Import]()
+ private val usedNameMap = new HashMap[Name, Request]()
+ private val boundNameMap = new HashMap[Name, Request]()
+ private def allHandlers = prevRequests.toList flatMap (_.handlers)
+
+ /** Most recent tree handled which wasn't wholly synthetic. */
+ private def mostRecentlyHandledTree: Option[Tree] = {
+ for {
+ req <- prevRequests.reverse
+ handler <- req.handlers.reverse
+ name <- handler.generatesValue
+ if !isSynthVarName(name)
+ } return Some(handler.member)
- private def allUsedNames = prevRequests.toList.flatMap(_.usedNames).removeDuplicates
- private def allBoundNames = prevRequests.toList.flatMap(_.boundNames).removeDuplicates
- // private def allImportedNames = prevImports.toList.flatMap(_.importedNames).removeDuplicates
+ None
+ }
+
+ def recordRequest(req: Request) {
+ def tripart[T](set1: Set[T], set2: Set[T]) = {
+ val intersect = set1 intersect set2
+ List(set1 -- intersect, intersect, set2 -- intersect)
+ }
+
+ prevRequests += req
+ req.usedNames foreach (x => usedNameMap(x) = req)
+ req.boundNames foreach (x => boundNameMap(x) = req)
+
+ // XXX temporarily putting this here because of tricky initialization order issues
+ // so right now it's not bound until after you issue a command.
+ if (prevRequests.size == 1)
+ quietBind("settings", "scala.tools.nsc.InterpreterSettings", isettings)
+
+ // println("\n s1 = %s\n s2 = %s\n s3 = %s".format(
+ // tripart(usedNameMap.keysIterator.toSet, boundNameMap.keysIterator.toSet): _*
+ // ))
+ }
+
+ private def keyList[T](x: collection.Map[T, _]): List[T] = x.keys.toList sortBy (_.toString)
+ def allUsedNames = keyList(usedNameMap)
+ def allBoundNames = keyList(boundNameMap)
+ def allSeenTypes = prevRequests.toList flatMap (_.typeOf.values.toList) distinct
+ def allValueGeneratingNames = allHandlers flatMap (_.generatesValue)
+ def allImplicits = partialFlatMap(allHandlers) {
+ case x: MemberHandler if x.definesImplicit => x.boundNames
+ }
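
prevImports and the removeDuplicates scans give way to per-name indexes maintained as each request is recorded. A reduced model of that bookkeeping (Req stands in for the interpreter's Request, plain strings for Names):

    import scala.collection.mutable

    object NameTrackingSketch {
      final case class Req(id: Int, usedNames: List[String], boundNames: List[String])

      private val prevRequests = mutable.ArrayBuffer[Req]()
      private val usedNameMap  = mutable.HashMap[String, Req]()
      private val boundNameMap = mutable.HashMap[String, Req]()

      /** Record a request and index it by the names it uses and binds,
       *  so later lines can find the most recent owner of a name.
       */
      def recordRequest(req: Req): Unit = {
        prevRequests += req
        req.usedNames  foreach (n => usedNameMap(n)  = req)
        req.boundNames foreach (n => boundNameMap(n) = req)
      }

      def allBoundNames: List[String] = boundNameMap.keys.toList.sorted
    }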
/** Generates names pre0, pre1, etc. via calls to apply method */
class NameCreator(pre: String) {
@@ -203,17 +298,21 @@ class Interpreter(val settings: Settings, out: PrintWriter)
}
/** allocate a fresh line name */
- private val lineNameCreator = new NameCreator(INTERPRETER_LINE_PREFIX)
+ private lazy val lineNameCreator = new NameCreator(INTERPRETER_LINE_PREFIX)
/** allocate a fresh var name */
- private val varNameCreator = new NameCreator(INTERPRETER_VAR_PREFIX)
+ private lazy val varNameCreator = new NameCreator(INTERPRETER_VAR_PREFIX)
/** allocate a fresh internal variable name */
- private def synthVarNameCreator = new NameCreator(INTERPRETER_SYNTHVAR_PREFIX)
+ private lazy val synthVarNameCreator = new NameCreator(INTERPRETER_SYNTHVAR_PREFIX)
/** Check if a name looks like it was generated by varNameCreator */
private def isGeneratedVarName(name: String): Boolean = varNameCreator didGenerate name
private def isSynthVarName(name: String): Boolean = synthVarNameCreator didGenerate name
+ private def isSynthVarName(name: Name): Boolean = synthVarNameCreator didGenerate name.toString
+
+ def getVarName = varNameCreator()
+ def getSynthVarName = synthVarNameCreator()
/** generate a string using a routine that wants to write on a stream */
private def stringFrom(writer: PrintWriter => Unit): String = {
@@ -224,7 +323,7 @@ class Interpreter(val settings: Settings, out: PrintWriter)
stringWriter.toString
}
- /** Truncate a string if it is longer than settings.maxPrintString */
+ /** Truncate a string if it is longer than isettings.maxPrintString */
private def truncPrintString(str: String): String = {
val maxpr = isettings.maxPrintString
val trailer = "..."
@@ -252,6 +351,7 @@ class Interpreter(val settings: Settings, out: PrintWriter)
str.flush()
})
}
+ def indentString(s: String) = s split "\n" map (spaces + _ + "\n") mkString
implicit def name2string(name: Name) = name.toString
@@ -284,41 +384,41 @@ class Interpreter(val settings: Settings, out: PrintWriter)
* should be taken. Removes requests which cannot contribute
* useful imports for the specified set of wanted names.
*/
- case class ReqAndHandler(req: Request, handler: MemberHandler)
+ case class ReqAndHandler(req: Request, handler: MemberHandler) { }
+
def reqsToUse: List[ReqAndHandler] = {
/** Loop through a list of MemberHandlers and select which ones to keep.
* 'wanted' is the set of names that need to be imported.
*/
def select(reqs: List[ReqAndHandler], wanted: Set[Name]): List[ReqAndHandler] = {
val isWanted = wanted contains _
- def keepHandler(handler: MemberHandler): Boolean = {
- import handler._
- // Single symbol imports might be implicits! See bug #1752. Rather than
- // try to finesse this, we will mimic all imports for now.
- def isImport = handler.isInstanceOf[ImportHandler]
- definesImplicit || isImport || (importedNames ++ boundNames).exists(isWanted)
+ // Single symbol imports might be implicits! See bug #1752. Rather than
+ // try to finesse this, we will mimic all imports for now.
+ def keepHandler(handler: MemberHandler) = handler match {
+ case _: ImportHandler => true
+ case x => x.definesImplicit || (x.boundNames exists isWanted)
}
reqs match {
case Nil => Nil
case rh :: rest if !keepHandler(rh.handler) => select(rest, wanted)
case rh :: rest =>
+ val importedNames = rh.handler match { case x: ImportHandler => x.importedNames ; case _ => Nil }
import rh.handler._
val newWanted = wanted ++ usedNames -- boundNames -- importedNames
rh :: select(rest, newWanted)
}
}
- val rhpairs = for {
- req <- prevRequests.toList.reverse
- handler <- req.handlers
- } yield ReqAndHandler(req, handler)
-
+ /** Flatten the handlers out and pair each with the original request */
+ val rhpairs = prevRequests.reverse.toList flatMap { req =>
+ req.handlers map (ReqAndHandler(req, _))
+ }
select(rhpairs, wanted).reverse
}
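
select walks the previous handlers newest-first, keeps the ones that can satisfy a wanted name, and grows the wanted set with the names a kept handler itself uses. Stripped of the implicit and import special cases, the recursion looks like this (sets of strings stand in for Name sets):

    object ImportSelectSketch {
      final case class Handler(boundNames: Set[String], usedNames: Set[String])

      // Keep a handler if it binds something wanted; whatever it uses becomes
      // wanted in turn, minus what it binds itself.
      def select(handlers: List[Handler], wanted: Set[String]): List[Handler] = handlers match {
        case Nil => Nil
        case h :: rest if (h.boundNames intersect wanted).isEmpty => select(rest, wanted)
        case h :: rest =>
          val newWanted = wanted ++ h.usedNames -- h.boundNames
          h :: select(rest, newWanted)
      }
    }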
val code, trailingBraces, accessPath = new StringBuffer
- val currentImps = mutable.Set.empty[Name]
+ val currentImps = HashSet[Name]()
// add code for a new object to hold some imports
def addWrapper() {
@@ -334,32 +434,33 @@ class Interpreter(val settings: Settings, out: PrintWriter)
// loop through previous requests, adding imports for each one
for (ReqAndHandler(req, handler) <- reqsToUse) {
- import handler._
- // If the user entered an import, then just use it; add an import wrapping
- // level if the import might conflict with some other import
- if (importsWildcard || currentImps.exists(importedNames.contains))
- addWrapper()
-
- if (member.isInstanceOf[Import])
- code append (member.toString + "\n")
-
- // give wildcard imports a import wrapper all to their own
- if (importsWildcard) addWrapper()
- else currentImps ++= importedNames
-
- // For other requests, import each bound variable.
- // import them explicitly instead of with _, so that
- // ambiguity errors will not be generated. Also, quote
- // the name of the variable, so that we don't need to
- // handle quoting keywords separately.
- for (imv <- boundNames) {
- if (currentImps contains imv) addWrapper()
-
- code append ("import " + req.fullPath(imv))
- currentImps += imv
+ handler match {
+ // If the user entered an import, then just use it; add an import wrapping
+ // level if the import might conflict with some other import
+ case x: ImportHandler =>
+ if (x.importsWildcard || (currentImps exists (x.importedNames contains _)))
+ addWrapper()
+
+ code append (x.member.toString + "\n")
+
+ // give wildcard imports an import wrapper all their own
+ if (x.importsWildcard) addWrapper()
+ else currentImps ++= x.importedNames
+
+ // For other requests, import each bound variable.
+ // import them explicitly instead of with _, so that
+ // ambiguity errors will not be generated. Also, quote
+ // the name of the variable, so that we don't need to
+ // handle quoting keywords separately.
+ case x =>
+ for (imv <- x.boundNames) {
+ if (currentImps contains imv) addWrapper()
+
+ code append ("import %s\n" format (req fullPath imv))
+ currentImps += imv
+ }
}
}
-
// add one extra wrapper, to prevent warnings in the common case of
// redefining the value bound in the last interpreter request.
addWrapper()
@@ -387,12 +488,43 @@ class Interpreter(val settings: Settings, out: PrintWriter)
}
/** For :power - create trees and type aliases from code snippets. */
+ def mkContext(code: String = "") = compiler.analyzer.rootContext(mkUnit(code))
+ def mkAlias(name: String, what: String) = interpret("type %s = %s".format(name, what))
+ def mkSourceFile(code: String) = new BatchSourceFile("<console>", code)
+ def mkUnit(code: String) = new CompilationUnit(mkSourceFile(code))
+
def mkTree(code: String): Tree = mkTrees(code).headOption getOrElse EmptyTree
def mkTrees(code: String): List[Tree] = parse(code) getOrElse Nil
- def mkType(name: String, what: String) = interpret("type " + name + " = " + what)
+ def mkTypedTrees(code: String*): List[compiler.Tree] = {
+ class TyperRun extends compiler.Run {
+ override def stopPhase(name: String) = name == "superaccessors"
+ }
+
+ reporter.reset
+ val run = new TyperRun
+ run compileSources (code.toList.zipWithIndex map {
+ case (s, i) => new BatchSourceFile("<console %d>".format(i), s)
+ })
+ run.units.toList map (_.body)
+ }
+ def mkTypedTree(code: String) = mkTypedTrees(code).head
+
+ def mkType(id: String): compiler.Type = {
+ // if it's a recognized identifier, the type of that; otherwise treat the
+ // String like it is itself a type (e.g. scala.collection.Map) .
+ val typeName = typeForIdent(id) getOrElse id
+
+ try definitions.getClass(newTermName(typeName)).tpe
+ catch { case _: Throwable => NoType }
+ }
+
+ private[nsc] val powerMkImports = List(
+ "mkContext", "mkTree", "mkTrees", "mkAlias", "mkSourceFile", "mkUnit", "mkType", "mkTypedTree", "mkTypedTrees"
+ // , "treeWrapper"
+ )
/** Compile an nsc SourceFile. Returns true if there are
- * no compilation errors, or false othrewise.
+ * no compilation errors, or false otherwise.
*/
def compileSources(sources: SourceFile*): Boolean = {
reporter.reset
@@ -406,11 +538,23 @@ class Interpreter(val settings: Settings, out: PrintWriter)
def compileString(code: String): Boolean =
compileSources(new BatchSourceFile("<script>", code))
+ def compileAndSaveRun(label: String, code: String) = {
+ if (isReplDebug) {
+ parse(code) match {
+ case Some(trees) => trees foreach (t => DBG(compiler.asCompactString(t)))
+ case _ => DBG("Parse error:\n\n" + code)
+ }
+ }
+ val run = new compiler.Run()
+ run.compileSources(List(new BatchSourceFile(label, code)))
+ run
+ }
+
/** Build a request from the user. <code>trees</code> is <code>line</code>
* after being parsed.
*/
- private def buildRequest(trees: List[Tree], line: String, lineName: String): Request =
- new Request(line, lineName)
+ private def buildRequest(line: String, lineName: String, trees: List[Tree]): Request =
+ new Request(line, lineName, trees)
private def chooseHandler(member: Tree): MemberHandler = member match {
case member: DefDef => new DefHandler(member)
@@ -424,6 +568,29 @@ class Interpreter(val settings: Settings, out: PrintWriter)
case member => new GenericHandler(member)
}
+ private def requestFromLine(line: String, synthetic: Boolean): Either[IR.Result, Request] = {
+ val trees = parse(indentCode(line)) match {
+ case None => return Left(IR.Incomplete)
+ case Some(Nil) => return Left(IR.Error) // parse error or empty input
+ case Some(trees) => trees
+ }
+
+ // use synthetic vars to avoid filling up the resXX slots
+ def varName = if (synthetic) getSynthVarName else getVarName
+
+ // Treat a single bare expression specially. This is necessary due to it being hard to
+ // modify code at a textual level, and it being hard to submit an AST to the compiler.
+ if (trees.size == 1) trees.head match {
+ case _:Assign => // we don't want to include assignments
+ case _:TermTree | _:Ident | _:Select => // ... but do want these as valdefs.
+ return requestFromLine("val %s =\n%s".format(varName, line), synthetic)
+ case _ =>
+ }
+
+ // figure out what kind of request
+ Right(buildRequest(line, lineNameCreator(), trees))
+ }
+
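
requestFromLine folds the old early returns into an Either: Left carries a result that ends processing (incomplete or erroneous input), Right carries the parsed trees to keep working with. The dispatch in isolation, with strings standing in for compiler trees:

    object ParseDispatchSketch {
      sealed trait Result
      case object Incomplete extends Result
      case object Error      extends Result

      def classify(parsed: Option[List[String]]): Either[Result, List[String]] =
        parsed match {
          case None        => Left(Incomplete)
          case Some(Nil)   => Left(Error)        // parse error or empty input
          case Some(trees) => Right(trees)
        }
    }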
/** <p>
* Interpret one line of input. All feedback, including parse errors
* and evaluation results, are printed via the supplied compiler's
@@ -438,29 +605,14 @@ class Interpreter(val settings: Settings, out: PrintWriter)
* @param line ...
* @return ...
*/
- def interpret(line: String): IR.Result = {
- // initialize the compiler
- if (prevRequests.isEmpty) new compiler.Run()
-
- // parse
- val trees = parse(indentCode(line)) match {
- case None => return IR.Incomplete
- case Some(Nil) => return IR.Error // parse error or empty input
- case Some(trees) => trees
- }
-
- // Treat a single bare expression specially. This is necessary due to it being hard to
- // modify code at a textual level, and it being hard to submit an AST to the compiler.
- if (trees.size == 1) trees.head match {
- case _:Assign => // we don't want to include assignments
- case _:TermTree | _:Ident | _:Select =>
- return interpret("val %s =\n%s".format(varNameCreator(), line))
- case _ =>
+ def interpret(line: String): IR.Result = interpret(line, false)
+ def interpret(line: String, synthetic: Boolean): IR.Result = {
+ val req = requestFromLine(line, synthetic) match {
+ case Left(result) => return result
+ case Right(req) => req
}
-
- // figure out what kind of request
- val req = buildRequest(trees, line, lineNameCreator())
- // null is a disallowed statement type; otherwise compile and fail if false (implying e.g. a type error)
+ // null indicates a disallowed statement type; otherwise compile and
+ // fail if false (implying e.g. a type error)
if (req == null || !req.compile)
return IR.Error
@@ -469,14 +621,16 @@ class Interpreter(val settings: Settings, out: PrintWriter)
out print clean(result)
if (succeeded) {
- prevRequests += req // book-keeping
+ if (!synthetic)
+ recordRequest(req) // book-keeping
+
IR.Success
}
else IR.Error
}
/** A name creator used for objects created by <code>bind()</code>. */
- private val newBinder = new NameCreator("binder")
+ private lazy val newBinder = new NameCreator("binder")
/** Bind a specified name to a specified value. The name may
* later be used by expressions passed to interpret.
@@ -487,13 +641,13 @@ class Interpreter(val settings: Settings, out: PrintWriter)
* @return an indication of whether the binding succeeded
*/
def bind(name: String, boundType: String, value: Any): IR.Result = {
- val binderName = newBinder() // "binder" + binderNum()
+ val binderName = newBinder()
compileString("""
- | object %s {
- | var value: %s = _
- | def set(x: Any) = value = x.asInstanceOf[%s]
- | }
+ |object %s {
+ | var value: %s = _
+ | def set(x: Any) = value = x.asInstanceOf[%s]
+ |}
""".stripMargin.format(binderName, boundType, boundType))
val binderObject = loadByName(binderName)
@@ -509,7 +663,7 @@ class Interpreter(val settings: Settings, out: PrintWriter)
/** Reset this interpreter, forgetting all user-specified requests. */
def reset() {
virtualDirectory.clear
- classLoader = makeClassLoader
+ resetClassLoader()
lineNameCreator.reset()
varNameCreator.reset()
prevRequests.clear
@@ -548,17 +702,14 @@ class Interpreter(val settings: Settings, out: PrintWriter)
ivt.importVars.toList
}
def boundNames: List[Name] = Nil
- def valAndVarNames: List[Name] = Nil
- def defNames: List[Name] = Nil
- val importsWildcard = false
- val importedNames: Seq[Name] = Nil
- val definesImplicit = member match {
- case tree: MemberDef => tree.mods hasFlag Flags.IMPLICIT
- case _ => false
+ val definesImplicit = cond(member) {
+ case tree: MemberDef => tree.mods hasFlag Flags.IMPLICIT
}
+ def generatesValue: Option[Name] = None
def extraCodeToEvaluate(req: Request, code: PrintWriter) { }
def resultExtractionCode(req: Request, code: PrintWriter) { }
+
override def toString = "%s(usedNames = %s)".format(this.getClass, usedNames)
}
@@ -570,34 +721,30 @@ class Interpreter(val settings: Settings, out: PrintWriter)
lazy val isLazy = mods hasFlag Flags.LAZY
override lazy val boundNames = List(vname)
- override def valAndVarNames = boundNames
+ override def generatesValue = Some(vname)
override def resultExtractionCode(req: Request, code: PrintWriter) {
val isInternal = isGeneratedVarName(vname) && req.typeOfEnc(vname) == "Unit"
if (!mods.isPublic || isInternal) return
- lazy val extractor = """
- | {
- | val s = scala.runtime.ScalaRunTime.stringOf(%s)
- | val nl = if (s.contains('\n')) "\n" else ""
- | nl + s + "\n"
- | }
- """.stripMargin.format(req fullPath vname)
+ lazy val extractor = "scala.runtime.ScalaRunTime.stringOf(%s)".format(req fullPath vname)
// if this is a lazy val we avoid evaluating it here
val resultString = if (isLazy) codegenln(false, "<lazy>") else extractor
val codeToPrint =
- """ + "%s: %s = " + %s""" .
- format(prettyName, string2code(req.typeOf(vname)), resultString)
+ """ + "%s: %s = " + %s""".format(prettyName, string2code(req typeOf vname), resultString)
code print codeToPrint
}
}
private class DefHandler(defDef: DefDef) extends MemberHandler(defDef) {
- lazy val DefDef(mods, name, _, _, _, _) = defDef
+ lazy val DefDef(mods, name, _, vparamss, _, _) = defDef
override lazy val boundNames = List(name)
- override def defNames = boundNames
+ // true if 0-arity
+ override def generatesValue =
+ if (vparamss.isEmpty || vparamss.head.isEmpty) Some(name)
+ else None
override def resultExtractionCode(req: Request, code: PrintWriter) =
if (mods.isPublic) code print codegenln(name, ": ", req.typeOf(name))
@@ -606,7 +753,7 @@ class Interpreter(val settings: Settings, out: PrintWriter)
private class AssignHandler(member: Assign) extends MemberHandler(member) {
val lhs = member.lhs.asInstanceOf[Ident] // an unfortunate limitation
val helperName = newTermName(synthVarNameCreator())
- override val valAndVarNames = List(helperName)
+ override def generatesValue = Some(helperName)
override def extraCodeToEvaluate(req: Request, code: PrintWriter) =
code println """val %s = %s""".format(helperName, lhs)
@@ -624,6 +771,7 @@ class Interpreter(val settings: Settings, out: PrintWriter)
private class ModuleHandler(module: ModuleDef) extends MemberHandler(module) {
lazy val ModuleDef(mods, name, _) = module
override lazy val boundNames = List(name)
+ override def generatesValue = Some(name)
override def resultExtractionCode(req: Request, code: PrintWriter) =
code println codegenln("defined module ", name)
@@ -649,27 +797,22 @@ class Interpreter(val settings: Settings, out: PrintWriter)
private class ImportHandler(imp: Import) extends MemberHandler(imp) {
/** Whether this import includes a wildcard import */
- override val importsWildcard = imp.selectors.map(_.name) contains USCOREkw
+ val importsWildcard = imp.selectors map (_.name) contains USCOREkw
/** The individual names imported by this statement */
- override val importedNames: Seq[Name] = for {
- sel <- imp.selectors
- if (sel.rename != null && sel.rename != USCOREkw)
- name <- List(sel.rename.toTypeName, sel.rename.toTermName)
- }
- yield name
-
- // record the import
- prevImports += imp
+ val importedNames: List[Name] = (
+ imp.selectors
+ . map (x => x.rename)
+ . filter (x => x != null && x != USCOREkw)
+ . flatMap (x => List(x.toTypeName, x.toTermName))
+ )
override def resultExtractionCode(req: Request, code: PrintWriter) =
code println codegenln(imp.toString)
}
/** One line of code submitted by the user for interpretation */
- private class Request(val line: String, val lineName: String) {
- val trees = parse(line) getOrElse Nil
-
+ private class Request(val line: String, val lineName: String, val trees: List[Tree]) {
/** name to use for the object that will compute "line" */
def objectName = lineName + INTERPRETER_WRAPPER_SUFFIX
@@ -680,18 +823,26 @@ class Interpreter(val settings: Settings, out: PrintWriter)
val handlers: List[MemberHandler] = trees map chooseHandler
/** all (public) names defined by these statements */
- val boundNames = (ListSet() ++ handlers.flatMap(_.boundNames)).toList
+ val boundNames = handlers flatMap (_.boundNames)
/** list of names used by this expression */
- val usedNames: List[Name] = handlers.flatMap(_.usedNames)
+ val usedNames: List[Name] = handlers flatMap (_.usedNames)
+
+ /** def and val names */
+ def defNames = partialFlatMap(handlers) { case x: DefHandler => x.boundNames }
+ def valAndVarNames = partialFlatMap(handlers) {
+ case x: AssignHandler => List(x.helperName)
+ case x: ValHandler => boundNames
+ }
/** Code to import bound names from previous lines - accessPath is code to
- * append to objectName to access anything bound by request. */
+ * append to objectName to access anything bound by request.
+ */
val ComputedImports(importsPreamble, importsTrailer, accessPath) =
importsCode(Set.empty ++ usedNames)
/** Code to access a variable with the specified name */
- def fullPath(vname: String): String = "%s.`%s`\n".format(objectName + accessPath, vname)
+ def fullPath(vname: String): String = "%s.`%s`".format(objectName + accessPath, vname)
/** Code to access a variable with the specified name */
def fullPath(vname: Name): String = fullPath(vname.toString)
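// Worked example of the path construction above (wrapper names are illustrative;
// the real accessPath comes from importsCode and the suffix from
// INTERPRETER_WRAPPER_SUFFIX):
//
//   objectName    == "line3$object"
//   accessPath    == ".$iw.$iw"
//   fullPath("x") == "line3$object.$iw.$iw.`x`"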
@@ -701,40 +852,47 @@ class Interpreter(val settings: Settings, out: PrintWriter)
/** generate the source code for the object that computes this request */
def objectSourceCode: String = stringFrom { code =>
- // whitespace compatible with interpreter.scala
- val preamble = """object %s {
- | %s%s
+ val preamble = """
+ |object %s {
+ | %s%s
""".stripMargin.format(objectName, importsPreamble, indentCode(toCompute))
- // val preamble = """
- // | object %s {
- // | %s %s
- // """.stripMargin.format(objectName, importsPreamble, indentCode(toCompute))
- val postamble = importsTrailer + "; }"
+ val postamble = importsTrailer + "\n}"
code println preamble
handlers foreach { _.extraCodeToEvaluate(this, code) }
code println postamble
}
- /** Types of variables defined by this request. They are computed
- after compilation of the main object */
- var typeOf: Map[Name, String] = _
- def typeOfEnc(vname: Name) = typeOf(compiler encode vname)
-
/** generate source code for the object that retrieves the result
from objectSourceCode */
def resultObjectSourceCode: String = stringFrom { code =>
+ /** We only want to generate this code when the result
+ * is a value which can be referred to as-is.
+ */
+ val valueExtractor = handlers.last.generatesValue match {
+ case Some(vname) if typeOf contains vname =>
+ """
+ |lazy val scala_repl_value = {
+ | scala_repl_result
+ | %s
+ |}""".stripMargin.format(fullPath(vname))
+ case _ => ""
+ }
+
+ // first line evaluates object to make sure constructor is run
+ // initial "" so later code can uniformly be: + etc
val preamble = """
- | object %s {
- | val scala_repl_result: String = {
- | %s // evaluate object to make sure constructor is run
- | ("" // an initial "" so later code can uniformly be: + etc
- """.stripMargin.format(resultObjectName, objectName + accessPath)
+ |object %s {
+ | %s
+ | val scala_repl_result: String = {
+ | %s
+ | (""
+ """.stripMargin.format(resultObjectName, valueExtractor, objectName + accessPath)
val postamble = """
- | )
- | }
- | }
+ | )
+ | }
+ |}
""".stripMargin
code println preamble
@@ -742,6 +900,23 @@ class Interpreter(val settings: Settings, out: PrintWriter)
code println postamble
}
+ // compile the object containing the user's code
+ lazy val objRun = compileAndSaveRun("<console>", objectSourceCode)
+
+ // compile the result-extraction object
+ lazy val extractionObjectRun = compileAndSaveRun("<console>", resultObjectSourceCode)
+
+ lazy val loadedResultObject = loadByName(resultObjectName)
+
+ def extractionValue(): Option[AnyRef] = {
+ // ensure it has run
+ extractionObjectRun
+
+ // load it and retrieve the value
+ try Some(loadedResultObject getMethod "scala_repl_value" invoke loadedResultObject)
+ catch { case _: Exception => None }
+ }
+
/** Compile the object file. Returns whether the compilation succeeded.
* If all goes well, the "types" map is computed. */
def compile(): Boolean = {
@@ -749,45 +924,42 @@ class Interpreter(val settings: Settings, out: PrintWriter)
reporter.reset
// compile the main object
- val objRun = new compiler.Run()
- objRun.compileSources(List(new BatchSourceFile("<console>", objectSourceCode)))
+ objRun
+
+ // bail on error
if (reporter.hasErrors)
return false
// extract and remember types
- typeOf = findTypes(objRun)
+ typeOf
// compile the result-extraction object
- new compiler.Run().compileSources(List(new BatchSourceFile("<console>", resultObjectSourceCode)))
+ extractionObjectRun
// success
!reporter.hasErrors
}
- /** Dig the types of all bound variables out of the compiler run.
- *
- * @param objRun ...
- * @return ...
- */
- def findTypes(objRun: compiler.Run): Map[Name, String] = {
- import compiler.definitions.{ EmptyPackage, getMember }
- def valAndVarNames = handlers flatMap { _.valAndVarNames }
- def defNames = handlers flatMap { _.defNames }
- def getTypes(names: List[Name], nameMap: Name => Name): Map[Name, String] = {
- def atNextPhase[T](op: => T): T = compiler.atPhase(objRun.typerPhase.next)(op)
+ def atNextPhase[T](op: => T): T = compiler.atPhase(objRun.typerPhase.next)(op)
- /** the outermost wrapper object */
- val outerResObjSym: Symbol = getMember(EmptyPackage, newTermName(objectName))
+ /** The outermost wrapper object */
+ lazy val outerResObjSym: Symbol = getMember(EmptyPackage, newTermName(objectName))
- /** the innermost object inside the wrapper, found by
- * following accessPath into the outer one. */
- val resObjSym =
- accessPath.split("\\.").foldLeft(outerResObjSym) { (sym, name) =>
- if (name == "") sym else
- atNextPhase(sym.info member newTermName(name))
- }
+ /** The innermost object inside the wrapper, found by
+ * following accessPath into the outer one. */
+ lazy val resObjSym =
+ accessPath.split("\\.").foldLeft(outerResObjSym) { (sym, name) =>
+ if (name == "") sym else
+ atNextPhase(sym.info member newTermName(name))
+ }
+ /* typeOf lookup with encoding */
+ def typeOfEnc(vname: Name) = typeOf(compiler encode vname)
+
+ /** Types of variables defined by this request. */
+ lazy val typeOf: Map[Name, String] = {
+ def getTypes(names: List[Name], nameMap: Name => Name): Map[Name, String] = {
names.foldLeft(Map.empty[Name, String]) { (map, name) =>
val rawType = atNextPhase(resObjSym.info.member(name).tpe)
// the types are all =>T; remove the =>
@@ -800,15 +972,12 @@ class Interpreter(val settings: Settings, out: PrintWriter)
}
}
- val names1 = getTypes(valAndVarNames, nme.getterToLocal(_))
- val names2 = getTypes(defNames, identity)
- names1 ++ names2
+ getTypes(valAndVarNames, nme.getterToLocal(_)) ++ getTypes(defNames, identity)
}
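// Illustrative expectation for the lazy typeOf map above (the request text and the
// resulting types are assumptions, not taken from this patch): after compiling a
// request such as
//
//   val x = 5
//   def answer = 42
//
// typeOf should contain roughly Map(x -> "Int", answer -> "Int"), where val/var
// names are resolved via their local (getterToLocal) form and def names via identity.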
/** load and run the code using reflection */
def loadAndRun: (String, Boolean) = {
- val resultObject: Class[_] = loadByName(resultObjectName)
- val resultValMethod: reflect.Method = resultObject getMethod "scala_repl_result"
+ val resultValMethod: reflect.Method = loadedResultObject getMethod "scala_repl_result"
// XXX if wrapperExceptions isn't type-annotated we crash scalac
val wrapperExceptions: List[Class[_ <: Throwable]] =
List(classOf[InvocationTargetException], classOf[ExceptionInInitializerError])
@@ -824,7 +993,7 @@ class Interpreter(val settings: Settings, out: PrintWriter)
catching(onErr) {
unwrapping(wrapperExceptions: _*) {
- (resultValMethod.invoke(resultObject).toString, true)
+ (resultValMethod.invoke(loadedResultObject).toString, true)
}
}
}
@@ -833,122 +1002,166 @@ class Interpreter(val settings: Settings, out: PrintWriter)
/** These methods are exposed so REPL commands can access them.
* The command infrastructure is in InterpreterLoop.
*/
- def dumpState(xs: List[String]): String = {
- // println("Imports for " + req + " => " + req.importsPreamble)
- // req.handlers foreach { h => println("Handler " + h + " used names: " + h.usedNames) }
- // req.trees foreach { x => println("Tree: " + x) }
- // xs foreach { x => println("membersOfIdentifier(" + x + ") = " + membersOfIdentifier(x)) }
- List(
- "allUsedNames = " + allUsedNames,
- "allBoundNames = " + allBoundNames,
- prevRequests.toList.map(req => " \"" + req.line + "\" => " + req.objectSourceCode)
- ).mkString("", "\n", "\n")
- }
-
- // very simple right now, will get more interesting
- def dumpTrees(xs: List[String]): String = {
- val treestrs = (
- for (x <- xs ; name <- nameOfIdent(x) ; req <- requestForName(name))
- yield req.trees
- ).flatten
-
- if (treestrs.isEmpty) "No trees found."
- else treestrs.map(t => t.toString + " (" + t.getClass.getSimpleName + ")\n").mkString
- }
+ def dumpState(xs: List[String]): String = """
+ | Names used: %s
+ |
+ | Identifiers: %s
+ |
+ | synthvars: %d
+ """.stripMargin.format(
+ allUsedNames mkString " ",
+ unqualifiedIds mkString " ",
+ allBoundNames filter isSynthVarName size
+ )
+
+ // def dumpTrees(xs: List[String]): String = {
+ // val treestrs = (xs map requestForIdent).flatten flatMap (_.trees)
+ //
+ // if (treestrs.isEmpty) "No trees found."
+ // else treestrs.map(t => t.toString + " (" + t.getClass.getSimpleName + ")\n").mkString
+ // }
def powerUser(): String = {
beQuietDuring {
- this.bind("interpreter", "scala.tools.nsc.Interpreter", this)
+ this.bind("repl", "scala.tools.nsc.Interpreter", this)
this.bind("global", "scala.tools.nsc.Global", compiler)
- interpret("""import interpreter.{ mkType, mkTree, mkTrees }""")
+ interpret("import repl.{ %s, eval }".format(powerMkImports mkString ", "), false)
}
"""** Power User mode enabled - BEEP BOOP **
- |** New vals! Try interpreter, global **
+ |** New vals! Try repl, global **
|** New cmds! :help to discover them **
|** New defs! Give these a whirl: **
- |** mkType("Fn", "(String, Int) => Int") **
+ |** mkAlias("Fn", "(String, Int) => Int") **
|** mkTree("def f(x: Int, y: Int) = x+y") **""".stripMargin
}
- def nameOfIdent(line: String): Option[Name] = {
- parse(line) match {
- case Some(List(Ident(x))) => Some(x)
- case _ => None
- }
- }
-
/** Returns the name of the most recent interpreter result.
* Mostly this exists so you can conveniently invoke methods on
* the previous result.
*/
def mostRecentVar: String =
- prevRequests.last.handlers.last.member match {
+ if (mostRecentlyHandledTree.isEmpty) ""
+ else mostRecentlyHandledTree.get match {
case x: ValOrDefDef => x.name
case Assign(Ident(name), _) => name
case ModuleDef(_, name, _) => name
- case _ => varNameCreator.mostRecent
+ case _ => onull(varNameCreator.mostRecent)
}
- private def requestForName(name: Name): Option[Request] = {
- for (req <- prevRequests.toList.reverse) {
- if (req.handlers.exists(_.boundNames contains name))
- return Some(req)
- }
- None
+ private def requestForName(name: Name): Option[Request] =
+ prevRequests.reverse find (_.boundNames contains name)
+
+ private def requestForIdent(line: String): Option[Request] =
+ requestForName(newTermName(line))
+
+ def typeForIdent(id: String): Option[String] =
+ requestForIdent(id) map (_ typeOf newTermName(id))
+
+ def methodsOf(name: String) =
+ evalExpr[List[String]](methodsCode(name)) map (x => NameTransformer.decode(getOriginalName(x)))
+
+ def completionAware(name: String) = {
+ // XXX working around "object is not a value" crash, i.e.
+ // import java.util.ArrayList ; ArrayList.<tab>
+ clazzForIdent(name) flatMap (_ => evalExpr[Option[CompletionAware]](asCompletionAwareCode(name)))
}
- // XXX at the moment this is imperfect because scala's protected semantics
- // differ from java's, so protected methods appear public via reflection;
- // yet scala enforces the protection. The result is that protected members
- // appear in completion yet cannot actually be called. Fixing this
- // properly requires a scala.reflect.* API. Fixing it uglily is possible
- // too (cast to structural type!) but I deem poor use of energy.
- private val filterFlags: Int = {
- import java.lang.reflect.Modifier._
- STATIC | PRIVATE | PROTECTED
+ def extractionValueForIdent(id: String): Option[AnyRef] =
+ requestForIdent(id) flatMap (_.extractionValue)
+
+ /** Executes code looking for a manifest of type T.
+ */
+ def manifestFor[T: Manifest] =
+ evalExpr[Manifest[T]]("""manifest[%s]""".format(manifest[T]))
+
+ /** Executes code looking for an implicit value of type T.
+ */
+ def implicitFor[T: Manifest] = {
+ val s = manifest[T].toString
+ evalExpr[Option[T]]("{ def f(implicit x: %s = null): %s = x ; Option(f) }".format(s, s))
+ // We don't use implicitly so as to fail without failing.
+ // evalExpr[T]("""implicitly[%s]""".format(manifest[T]))
}
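// For example, implicitFor[Ordering[Int]] would interpret roughly the following
// snippet (the concrete type string comes from manifest[T].toString and is
// illustrative):
//
//   { def f(implicit x: scala.math.Ordering[Int] = null): scala.math.Ordering[Int] = x ; Option(f) }
//
// yielding Some(ordering) when an implicit is in scope and None otherwise, instead
// of the compile error that implicitly[...] would produce.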
- private val methodsCode = """ .
- | asInstanceOf[AnyRef].getClass.getMethods .
- | filter(x => (x.getModifiers & %d) == 0) .
- | map(_.getName) .
- | mkString(" ")""".stripMargin.format(filterFlags)
+ /** Executes code looking for an implicit conversion from the type
+ * of the given identifier to CompletionAware.
+ */
+ def completionAwareImplicit[T](id: String) = {
+ val f1string = "%s => %s".format(typeForIdent(id).get, classOf[CompletionAware].getName)
+ val code = """{
+ | def f(implicit x: (%s) = null): %s = x
+ | val f1 = f
+ | if (f1 == null) None else Some(f1(%s))
+ |}""".stripMargin.format(f1string, f1string, id)
+
+ evalExpr[Option[CompletionAware]](code)
+ }
+
+ def clazzForIdent(id: String): Option[Class[_]] =
+ extractionValueForIdent(id) flatMap (x => Option(x) map (_.getClass))
+
+ private def methodsCode(name: String) =
+ "%s.%s(%s)".format(classOf[ReflectionCompletion].getName, "methodsOf", name)
+
+ private def asCompletionAwareCode(name: String) =
+ "%s.%s(%s)".format(classOf[CompletionAware].getName, "unapply", name)
private def getOriginalName(name: String): String =
nme.originalName(newTermName(name)).toString
- /** The main entry point for tab-completion. When the user types x.<tab>
- * this method is called with "x" as an argument, and it discovers the
- * fields and methods of x via reflection and returns their names to jline.
+ case class InterpreterEvalException(msg: String) extends Exception(msg)
+ def evalError(msg: String) = throw InterpreterEvalException(msg)
+
+ /** The user-facing eval in :power mode wraps an Option.
*/
- def membersOfIdentifier(line: String): List[String] = {
- import Completion.{ shouldHide }
- import NameTransformer.{ decode, encode } // e.g. $plus$plus => ++
-
- val res = beQuietDuring {
- for (name <- nameOfIdent(line) ; req <- requestForName(name)) yield {
- if (interpret("val " + synthVarNameCreator() + " = " + name + methodsCode) != IR.Success) Nil
- else {
- val result = prevRequests.last.resultObjectName
- val resultObj = (classLoader tryToInitializeClass result).get
- val valMethod = resultObj getMethod "scala_repl_result"
- val str = valMethod.invoke(resultObj).toString
-
- str.substring(str.indexOf('=') + 1).trim .
- split(" ").toList .
- map(x => decode(getOriginalName(x))) .
- filterNot(shouldHide) .
- removeDuplicates
- }
- }
+ def eval[T: Manifest](line: String): Option[T] =
+ try Some(evalExpr[T](line))
+ catch { case InterpreterEvalException(msg) => out println indentString(msg) ; None }
+
+ def evalExpr[T: Manifest](line: String): T = {
+ // Nothing means the type could not be inferred.
+ if (manifest[T] eq Manifest.Nothing)
+ evalError("Could not infer type: try 'eval[SomeType](%s)' instead".format(line))
+
+ val lhs = getSynthVarName
+ beQuietDuring { interpret("val " + lhs + " = { " + line + " } ") }
+
+ // TODO - can we meaningfully compare the inferred type T with
+ // the internal compiler Type assigned to lhs?
+ // def assignedType = prevRequests.last.typeOf(newTermName(lhs))
+
+ val req = requestFromLine(lhs, true) match {
+ case Left(result) => evalError(result.toString)
+ case Right(req) => req
}
+ if (req == null || !req.compile || req.handlers.size != 1)
+ evalError("Eval error.")
+
+ try req.extractionValue.get.asInstanceOf[T] catch {
+ case e: Exception => evalError(e.getMessage)
+ }
+ }
- res getOrElse Nil
+ def interpretExpr[T: Manifest](code: String): Option[T] = beQuietDuring {
+ interpret(code) match {
+ case IR.Success =>
+ try prevRequests.last.extractionValue map (_.asInstanceOf[T])
+ catch { case e: Exception => out println e ; None }
+ case _ => None
+ }
}
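// Sketch of a :power session using the pieces above (inputs and results are
// illustrative; `repl` and `eval` are the names bound by powerUser()):
//
//   scala> :power
//   scala> val xs = List(1, 2, 3)
//   scala> eval[Int]("xs.sum")           // Some(6), or None plus an indented error message
//   scala> repl.typeForIdent("xs")       // e.g. Some("List[Int]")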
/** Another entry point for tab-completion, ids in scope */
- def unqualifiedIds(): List[String] =
- allBoundNames map (_.toString) filterNot isSynthVarName
+ private def unqualifiedIdNames() = partialFlatMap(allHandlers) {
+ case x: AssignHandler => List(x.helperName)
+ case x: ValHandler => List(x.vname)
+ case x: ModuleHandler => List(x.name)
+ case x: DefHandler => List(x.name)
+ case x: ImportHandler => x.importedNames
+ } filterNot isSynthVarName
+
+ /** Another entry point for tab-completion, ids in scope */
+ def unqualifiedIds() = (unqualifiedIdNames() map (_.toString)).distinct.sorted
/** For static/object method completion */
def getClassObject(path: String): Option[Class[_]] = classLoader tryToLoadClass path
@@ -956,24 +1169,58 @@ class Interpreter(val settings: Settings, out: PrintWriter)
/** Parse the ScalaSig to find type aliases */
def aliasForType(path: String) = ByteCode.aliasForType(path)
+ /** Artificial object demonstrating completion */
+ def replVarsObject() = CompletionAware(
+ Map[String, CompletionAware](
+ "ids" -> CompletionAware(() => unqualifiedIds, completionAware _),
+ "synthVars" -> CompletionAware(() => allBoundNames filter isSynthVarName map (_.toString)),
+ "types" -> CompletionAware(() => allSeenTypes map (_.toString)),
+ "implicits" -> CompletionAware(() => allImplicits map (_.toString))
+ )
+ )
+
+ // Coming soon
+ // implicit def string2liftedcode(s: String): LiftedCode = new LiftedCode(s)
+ // case class LiftedCode(code: String) {
+ // val lifted: String = {
+ // beQuietDuring { interpret(code) }
+ // eval2[String]("({ " + code + " }).toString")
+ // }
+ // def >> : String = lifted
+ // }
+
// debugging
- private var debuggingOutput = false
- def DBG(s: String) = if (debuggingOutput) out println s else ()
+ def isReplDebug = settings.Yrepldebug.value
+ def DBG(s: String) = if (isReplDebug) out println s else ()
}
/** Utility methods for the Interpreter. */
object Interpreter {
+
+ import scala.collection.generic.CanBuildFrom
+ def partialFlatMap[A, B, CC[X] <: Traversable[X]]
+ (coll: CC[A])
+ (pf: PartialFunction[A, CC[B]])
+ (implicit bf: CanBuildFrom[CC[A], B, CC[B]]) =
+ {
+ val b = bf(coll)
+ for (x <- coll collect pf)
+ b ++= x
+
+ b.result
+ }
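// Standalone sketch of partialFlatMap (the collection and partial function are
// illustrative): elements matched by the partial function contribute their mapped
// collections, everything else is dropped.
//
//   val xs: List[Any] = List(1, "two", 3, "four")
//   partialFlatMap(xs) { case s: String => List(s.toUpperCase) }
//   // => List("TWO", "FOUR")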
+
object DebugParam {
- implicit def tuple2debugparam[T](x: (String, T))(implicit m: scala.reflect.Manifest[T]): DebugParam[T] =
+ implicit def tuple2debugparam[T](x: (String, T))(implicit m: Manifest[T]): DebugParam[T] =
DebugParam(x._1, x._2)
- implicit def any2debugparam[T](x: T)(implicit m: scala.reflect.Manifest[T]): DebugParam[T] =
+ implicit def any2debugparam[T](x: T)(implicit m: Manifest[T]): DebugParam[T] =
DebugParam("p" + getCount(), x)
private var counter = 0
def getCount() = { counter += 1; counter }
}
- case class DebugParam[T](name: String, param: T)(implicit m: scala.reflect.Manifest[T]) {
+ case class DebugParam[T](name: String, param: T)(implicit m: Manifest[T]) {
val manifest = m
val typeStr = {
val str = manifest.toString
@@ -996,6 +1243,8 @@ object Interpreter {
def break(args: List[DebugParam[_]]): Unit = {
val intLoop = new InterpreterLoop
intLoop.settings = new Settings(Console.println)
+ // XXX come back to the dot handling
+ intLoop.settings.classpath.value = "."
intLoop.createInterpreter
intLoop.in = InteractiveReader.createDefault(intLoop.interpreter)
@@ -1004,29 +1253,24 @@ object Interpreter {
intLoop.interpreter.interpret("""def exit = println("Type :quit to resume program execution.")""")
for (p <- args) {
intLoop.interpreter.bind(p.name, p.typeStr, p.param)
- println("%s: %s".format(p.name, p.typeStr))
+ Console println "%s: %s".format(p.name, p.typeStr)
}
}
intLoop.repl()
intLoop.closeInterpreter
}
- /** Heuristically strip interpreter wrapper prefixes
- * from an interpreter output string.
- */
- def stripWrapperGunk(str: String): String = {
- val wrapregex = """(line[0-9]+\$object[$.])?(\$iw[$.])*"""
- str.replaceAll(wrapregex, "")
- }
-
def codegenln(leadingPlus: Boolean, xs: String*): String = codegen(leadingPlus, (xs ++ Array("\n")): _*)
def codegenln(xs: String*): String = codegenln(true, xs: _*)
+
def codegen(xs: String*): String = codegen(true, xs: _*)
def codegen(leadingPlus: Boolean, xs: String*): String = {
val front = if (leadingPlus) "+ " else ""
- xs.map("\"" + string2code(_) + "\"").mkString(front, " + ", "")
+ front + (xs map string2codeQuoted mkString " + ")
}
+ def string2codeQuoted(str: String) = "\"" + string2code(str) + "\""
+
/** Convert a string into code that can recreate the string.
* This requires replacing all special characters by escape
* codes. It does not add the surrounding " marks. */
diff --git a/src/compiler/scala/tools/nsc/InterpreterCommand.scala b/src/compiler/scala/tools/nsc/InterpreterCommand.scala
index a204edc70a..45e139194e 100644
--- a/src/compiler/scala/tools/nsc/InterpreterCommand.scala
+++ b/src/compiler/scala/tools/nsc/InterpreterCommand.scala
@@ -11,8 +11,7 @@ package scala.tools.nsc
* @author Lex Spoon
* @version 1.0
*/
-class InterpreterCommand(arguments: List[String], error: String => Unit)
-extends CompilerCommand(arguments, new Settings(error), error, false) {
+class InterpreterCommand(arguments: List[String], error: String => Unit) extends CompilerCommand(arguments, error) {
override val cmdName = "scala"
override lazy val fileEndings = List(".scalaint")
}
diff --git a/src/compiler/scala/tools/nsc/InterpreterLoop.scala b/src/compiler/scala/tools/nsc/InterpreterLoop.scala
index 1a941342c4..9d568418f8 100644
--- a/src/compiler/scala/tools/nsc/InterpreterLoop.scala
+++ b/src/compiler/scala/tools/nsc/InterpreterLoop.scala
@@ -6,18 +6,24 @@
package scala.tools.nsc
-import java.io.{ BufferedReader, File, FileReader, PrintWriter }
+import Predef.{ println => _, _ }
+import java.io.{ BufferedReader, FileReader, PrintWriter }
import java.io.IOException
import scala.tools.nsc.{ InterpreterResults => IR }
-import scala.collection.JavaConversions.asBuffer
+import scala.annotation.tailrec
+import scala.collection.mutable.ListBuffer
+import scala.concurrent.ops
+import util.{ ClassPath }
import interpreter._
-import io.{ Process }
+import io.{ File, Process }
// Classes to wrap up interpreter commands and their results
// You can add new commands by adding entries to val commands
// inside InterpreterLoop.
-object InterpreterControl {
+trait InterpreterControl {
+ self: InterpreterLoop =>
+
// the default result means "keep running, and don't record that line"
val defaultResult = Result(true, None)
@@ -26,7 +32,7 @@ object InterpreterControl {
def name: String
def help: String
def error(msg: String) = {
- println(":" + name + " " + msg + ".")
+ out.println(":" + name + " " + msg + ".")
Result(true, None)
}
def usage(): String
@@ -57,7 +63,6 @@ object InterpreterControl {
// the result of a single command
case class Result(keepRunning: Boolean, lineToRecord: Option[String])
}
-import InterpreterControl._
/** The
* <a href="http://scala-lang.org/" target="_top">Scala</a>
@@ -73,37 +78,30 @@ import InterpreterControl._
* @author Lex Spoon
* @version 1.2
*/
-class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
+class InterpreterLoop(in0: Option[BufferedReader], protected val out: PrintWriter) extends InterpreterControl {
def this(in0: BufferedReader, out: PrintWriter) = this(Some(in0), out)
def this() = this(None, new PrintWriter(Console.out))
/** The input stream from which commands come, set by main() */
var in: InteractiveReader = _
- def history = in match {
- case x: JLineReader => Some(x.history)
- case _ => None
- }
- def historyList: Seq[String] =
- history map (x => asBuffer(x.getHistoryList): Seq[String]) getOrElse Nil
/** The context class loader at the time this object was created */
protected val originalClassLoader = Thread.currentThread.getContextClassLoader
var settings: Settings = _ // set by main()
var interpreter: Interpreter = _ // set by createInterpreter()
- def isettings = interpreter.isettings
- // XXX
- var addedClasspath: List[String] = Nil
+ // classpath entries added via :cp
+ var addedClasspath: String = ""
/** A reverse list of commands to replay if the user requests a :replay */
- var replayCommandsRev: List[String] = Nil
+ var replayCommandStack: List[String] = Nil
/** A list of commands to replay if the user requests a :replay */
- def replayCommands = replayCommandsRev.reverse
+ def replayCommands = replayCommandStack.reverse
/** Record a command for replay should the user request a :replay */
- def addReplay(cmd: String) = replayCommandsRev = cmd :: replayCommandsRev
+ def addReplay(cmd: String) = replayCommandStack ::= cmd
/** Close the interpreter and set the var to <code>null</code>. */
def closeInterpreter() {
@@ -116,21 +114,14 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
/** Create a new interpreter. */
def createInterpreter() {
- if (!addedClasspath.isEmpty)
- settings.classpath.value += addedClasspath.map(File.pathSeparator + _).mkString
+ if (addedClasspath != "")
+ settings.classpath append addedClasspath
interpreter = new Interpreter(settings, out) {
override protected def parentClassLoader = classOf[InterpreterLoop].getClassLoader
}
interpreter.setContextClassLoader()
- }
-
- /** Bind the settings so that evaluated code can modify them */
- def bindSettings() {
- interpreter.beQuietDuring {
- interpreter.compileString(InterpreterSettings.sourceCodeForClass)
- interpreter.bind("settings", "scala.tools.nsc.InterpreterSettings", isettings)
- }
+ // interpreter.quietBind("settings", "scala.tools.nsc.InterpreterSettings", interpreter.isettings)
}
/** print a friendly help message */
@@ -151,37 +142,41 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
|Type :help for more information.""" .
stripMargin.format(versionString, javaVmName, javaVersion)
- out println welcomeMsg
- out.flush
+ plushln(welcomeMsg)
}
/** Show the history */
def printHistory(xs: List[String]) {
val defaultLines = 20
- if (history.isEmpty)
+ if (in.history.isEmpty)
return println("No history available.")
- val current = history.get.getCurrentIndex
+ val current = in.history.get.index
val count = try xs.head.toInt catch { case _: Exception => defaultLines }
- val lines = historyList takeRight count
+ val lines = in.historyList takeRight count
val offset = current - lines.size + 1
for ((line, index) <- lines.zipWithIndex)
println("%d %s".format(index + offset, line))
}
+ /** Some print conveniences */
+ def println(x: Any) = out println x
+ def plush(x: Any) = { out print x ; out.flush() }
+ def plushln(x: Any) = { out println x ; out.flush() }
+
/** Search the history */
def searchHistory(_cmdline: String) {
val cmdline = _cmdline.toLowerCase
- if (history.isEmpty)
+ if (in.history.isEmpty)
return println("No history available.")
- val current = history.get.getCurrentIndex
- val offset = current - historyList.size + 1
+ val current = in.history.get.index
+ val offset = current - in.historyList.size + 1
- for ((line, index) <- historyList.zipWithIndex ; if line.toLowerCase contains cmdline)
+ for ((line, index) <- in.historyList.zipWithIndex ; if line.toLowerCase contains cmdline)
println("%d %s".format(index + offset, line))
}
@@ -202,10 +197,10 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
val standardCommands: List[Command] = {
import CommandImplicits._
List(
+ OneArg("cp", "add an entry (jar or directory) to the classpath", addClasspath),
NoArgs("help", "print this help message", printHelp),
VarArgs("history", "show the history (optional arg: lines to show)", printHistory),
LineArg("h?", "search the history", searchHistory),
- OneArg("jar", "add a jar to the classpath", addJar),
OneArg("load", "load and interpret a Scala file", load),
NoArgs("power", "enable power user mode", power),
NoArgs("quit", "exit the interpreter", () => Result(false, None)),
@@ -224,8 +219,10 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
List(
VarArgs("dump", "displays a view of the interpreter's internal state",
(xs: List[String]) => interpreter dumpState xs),
- VarArgs("tree", "displays ASTs for specified identifiers",
- (xs: List[String]) => interpreter dumpTrees xs)
+ OneArg("search", "search the classpath for classes matching regex", search)
+
+ // VarArgs("tree", "displays ASTs for specified identifiers",
+ // (xs: List[String]) => interpreter dumpTrees xs)
// LineArg("meta", "given code which produces scala code, executes the results",
// (xs: List[String]) => )
)
@@ -252,39 +249,35 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
case _ => true
}
- /* For some reason, the first interpreted command always takes
- * a second or two. So, wait until the welcome message
- * has been printed before calling bindSettings. That way,
- * the user can read the welcome message while this
- * command executes.
- */
- val futLine = scala.concurrent.ops.future(readOneLine)
- bindSettings()
- if (!processLine(futLine()))
- return
+ // this is about the illusion of snappiness. We call initialize()
+ // which spins off a separate thread, then print the prompt and try
+ // our best to look ready. Ideally the user will spend a
+ // couple seconds saying "wow, it starts so fast!" and by the time
+ // they type a command the compiler is ready to roll.
+ interpreter.initialize()
- // loops until false, then returns
while (processLine(readOneLine)) { }
}
/** interpret all lines from a specified file */
def interpretAllFrom(filename: String) {
- val fileIn =
- try { new FileReader(filename) }
- catch { case _:IOException => return out.println("Error opening file: " + filename) }
+ val fileIn = File(filename)
+ if (!fileIn.exists)
+ return out.println("Error opening file: " + filename)
val oldIn = in
- val oldReplay = replayCommandsRev
+ val oldReplay = replayCommandStack
+
try {
- val inFile = new BufferedReader(fileIn)
- in = new SimpleReader(inFile, out, false)
- out.println("Loading " + filename + "...")
- out.flush
- repl
- } finally {
+ fileIn applyReader { reader =>
+ in = new SimpleReader(reader, out, false)
+ plushln("Loading " + filename + "...")
+ repl
+ }
+ }
+ finally {
in = oldIn
- replayCommandsRev = oldReplay
- fileIn.close
+ replayCommandStack = oldReplay
}
}
@@ -293,8 +286,7 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
closeInterpreter()
createInterpreter()
for (cmd <- replayCommands) {
- out.println("Replaying: " + cmd)
- out.flush() // because maybe cmd will have its own output
+ plushln("Replaying: " + cmd) // flush because maybe cmd will have its own output
command(cmd)
out.println
}
@@ -315,8 +307,8 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
}
def withFile(filename: String)(action: String => Unit) {
- if (! new File(filename).exists) out.println("That file does not exist")
- else action(filename)
+ if (File(filename).exists) action(filename)
+ else out.println("That file does not exist")
}
def load(arg: String) = {
@@ -328,22 +320,56 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
Result(true, shouldReplay)
}
+ def addClasspath(arg: String): Unit = {
+ val f = File(arg).normalize
+ if (f.exists) {
+ addedClasspath = ClassPath.join(addedClasspath, f.path)
+ val totalClasspath = ClassPath.join(settings.classpath.value, addedClasspath)
+ println("Added '%s'. Your new classpath is:\n%s".format(f.path, totalClasspath))
+ replay()
+ }
+ else out.println("The path '" + f + "' doesn't seem to exist.")
+ }
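// Expected :cp behaviour given the code above (the path is illustrative):
//
//   scala> :cp /tmp/extra.jar
//   Added '/tmp/extra.jar'. Your new classpath is:
//   ...
//
// Note that replay() re-runs the previously entered lines against the extended classpath.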
- def addJar(arg: String): Unit = {
- val f = new java.io.File(arg)
- if (!f.exists) {
- out.println("The file '" + f + "' doesn't seem to exist.")
- return
+ /** This isn't going to win any efficiency awards, but it's only
+ * available in power mode so I'm unconcerned for the moment.
+ */
+ def search(arg: String) {
+ val MAX_RESULTS = 40
+ if (in.completion.isEmpty) return println("No classpath data available")
+ val comp = in.completion.get
+
+ import java.util.regex.PatternSyntaxException
+ import comp.pkgs.agent._
+ import scala.collection.JavaConversions._
+
+ try {
+ val regex = arg.r
+ val matches = (
+ for ((k, vs) <- dottedPaths) yield {
+ val pkgs = if (regex findFirstMatchIn k isDefined) List("package " + k) else Nil
+ val classes = vs filter (regex findFirstMatchIn _.visibleName isDefined) map (" class " + k + "." + _.visibleName)
+
+ pkgs ::: classes
+ }
+ ).flatten
+
+ matches take MAX_RESULTS foreach println
+ }
+ catch {
+ case _: PatternSyntaxException =>
+ return println("Invalid regular expression: you must use java.util.regex.Pattern syntax.")
}
- addedClasspath = addedClasspath ::: List(f.getCanonicalPath)
- println("Added " + f.getCanonicalPath + " to your classpath.")
- replay()
}
def power() {
powerUserOn = true
out println interpreter.powerUser()
- interpreter.quietBind("history", "scala.collection.immutable.List[String]", historyList.toList)
+ if (in.history.isDefined)
+ interpreter.quietBind("history", "scala.collection.immutable.List[String]", in.historyList)
+
+ if (in.completion.isDefined)
+ interpreter.quietBind("replHelper", "scala.tools.nsc.interpreter.CompletionAware", interpreter.replVarsObject())
}
def verbosity() = {
@@ -380,27 +406,138 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
}
}
+ private val CONTINUATION_STRING = " | "
+ private val PROMPT_STRING = "scala> "
+
+ /** If it looks like they're pasting in a scala interpreter
+ * transcript, remove all the formatting we inserted so we
+ * can make some sense of it.
+ */
+ private var pasteStamp: Long = 0
+
+  /** Returns true if enough time has elapsed since the last line to treat the paste as finished. */
+ def updatePasteStamp(): Boolean = {
+ /* Enough milliseconds between readLines to call it a day. */
+ val PASTE_FINISH = 1000
+
+ val prevStamp = pasteStamp
+ pasteStamp = System.currentTimeMillis
+
+ (pasteStamp - prevStamp > PASTE_FINISH)
+
+ }
+ /** TODO - we could look for the usage of resXX variables in the transcript.
+ * Right now backreferences to auto-named variables will break.
+ */
+
+ /** The trailing lines complication was an attempt to work around the introduction
+ * of newlines in e.g. email messages of repl sessions. It doesn't work because
+ * an unlucky newline can always leave you with a syntactically valid first line,
+ * which is executed before the next line is considered. So this doesn't actually
+ * accomplish anything, but I'm leaving it in case I decide to try harder.
+ */
+ case class PasteCommand(cmd: String, trailing: ListBuffer[String] = ListBuffer[String]())
+
+ /** Commands start on lines beginning with "scala>" and each successive
+ * line which begins with the continuation string is appended to that command.
+ * Everything else is discarded. When the end of the transcript is spotted,
+ * all the commands are replayed.
+ */
+ @tailrec private def cleanTranscript(lines: List[String], acc: List[PasteCommand]): List[PasteCommand] = lines match {
+ case Nil => acc.reverse
+ case x :: xs if x startsWith PROMPT_STRING =>
+ val first = x stripPrefix PROMPT_STRING
+ val (xs1, xs2) = xs span (_ startsWith CONTINUATION_STRING)
+ val rest = xs1 map (_ stripPrefix CONTINUATION_STRING)
+ val result = (first :: rest).mkString("", "\n", "\n")
+
+ cleanTranscript(xs2, PasteCommand(result) :: acc)
+
+ case ln :: lns =>
+ val newacc = acc match {
+ case Nil => Nil
+ case PasteCommand(cmd, trailing) :: accrest =>
+ PasteCommand(cmd, trailing :+ ln) :: accrest
+ }
+ cleanTranscript(lns, newacc)
+ }
+
+ /** The timestamp is for safety so it doesn't hang looking for the end
+ * of a transcript. Ad hoc parsing can't be too demanding. You can
+ * also use ctrl-D to start it parsing.
+ */
+ @tailrec private def interpretAsPastedTranscript(lines: List[String]) {
+ val line = in.readLine("")
+ val finished = updatePasteStamp()
+
+ if (line == null || finished || line.trim == PROMPT_STRING.trim) {
+ val xs = cleanTranscript(lines.reverse, Nil)
+ println("Replaying %d commands from interpreter transcript." format xs.size)
+ for (PasteCommand(cmd, trailing) <- xs) {
+ out.flush()
+ def runCode(code: String, extraLines: List[String]) {
+ (interpreter interpret code) match {
+ case IR.Incomplete if extraLines.nonEmpty =>
+ runCode(code + "\n" + extraLines.head, extraLines.tail)
+ case _ => ()
+ }
+ }
+ runCode(cmd, trailing.toList)
+ }
+ }
+ else
+ interpretAsPastedTranscript(line :: lines)
+ }
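// Sketch of the transcript cleanup performed above (the pasted session is illustrative):
//
//   scala> val x = 1
//        | val y = x + 1
//   x: Int = 1
//   y: Int = 2
//
// becomes a single PasteCommand whose cmd is "val x = 1\nval y = x + 1\n"; the result
// lines land in `trailing` and are otherwise ignored when the commands are replayed.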
+
/** Interpret expressions starting with the first line.
* Read lines until a complete compilation unit is available
* or until a syntax error has been seen. If a full unit is
* read, go ahead and interpret it. Return the full string
* to be recorded for replay, if any.
*/
- def interpretStartingWith(code: String): Option[String] =
- if (code startsWith ".") interpretStartingWith(interpreter.mostRecentVar + code)
- else interpreter.interpret(code) match {
- case IR.Error => None
- case IR.Success => Some(code)
- case IR.Incomplete =>
- if (in.interactive && code.endsWith("\n\n")) {
- out.println("You typed two blank lines. Starting a new command.")
- None
- }
- else in.readLine(" | ") match {
- case null => None // end of file
- case line => interpretStartingWith(code + "\n" + line)
- }
+ def interpretStartingWith(code: String): Option[String] = {
+ def reallyInterpret = {
+ interpreter.interpret(code) match {
+ case IR.Error => None
+ case IR.Success => Some(code)
+ case IR.Incomplete =>
+ if (in.interactive && code.endsWith("\n\n")) {
+ out.println("You typed two blank lines. Starting a new command.")
+ None
+ }
+ else in.readLine(CONTINUATION_STRING) match {
+ case null => None // end of file
+ case line => interpretStartingWith(code + "\n" + line)
+ }
+ }
+ }
+
+ /** Here we place ourselves between the user and the interpreter and examine
+ * the input they are ostensibly submitting. We intervene in several cases:
+ *
+ * 1) If the line starts with "scala> " it is assumed to be an interpreter paste.
+ * 2) If the line starts with "." (but not ".." or "./") it is treated as an invocation
+ * on the previous result.
+ * 3) If the Completion object's execute returns Some(_), we inject that value
+ * and avoid the interpreter, as it's likely not valid scala code.
+ */
+ if (code == "") None
+ else if (code startsWith PROMPT_STRING) {
+ updatePasteStamp()
+ interpretAsPastedTranscript(List(code))
+ None
}
+ else if (Completion.looksLikeInvocation(code)) {
+ interpretStartingWith(interpreter.mostRecentVar + code)
+ }
+ else {
+ val result = for (comp <- in.completion ; res <- comp execute code) yield res
+ result match {
+ case Some(res) => injectAndName(res) ; None // completion took responsibility, so do not parse
+ case _ => reallyInterpret
+ }
+ }
+ }
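// Dispatch summary for interpretStartingWith (inputs illustrative):
//
//   ""                      -> None, nothing happens
//   "scala> val x = 1"      -> treated as a pasted transcript and replayed
//   ".toString"             -> resubmitted as "<mostRecentVar>.toString"
//   anything else           -> offered to Completion.execute first, else interpreted normally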
// runs :load <file> on any files passed via -i
def loadFiles(settings: Settings) = settings match {
@@ -408,7 +545,7 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
for (filename <- settings.loadfiles.value) {
val cmd = ":load " + filename
command(cmd)
- replayCommandsRev = cmd :: replayCommandsRev
+ addReplay(cmd)
out.println()
}
case _ =>
@@ -422,12 +559,10 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
in = in0 match {
case Some(in0) => new SimpleReader(in0, out, true)
case None =>
- val emacsShell = System.getProperty("env.emacs", "") != ""
-
- // the interpeter is passed as an argument to expose tab completion info
- if (settings.Xnojline.value || emacsShell) new SimpleReader
+ // the interpreter is passed as an argument to expose tab completion info
+ if (settings.Xnojline.value || Properties.isEmacsShell) new SimpleReader
else if (settings.noCompletion.value) InteractiveReader.createDefault()
- else InteractiveReader.createDefault(interpreter, this)
+ else InteractiveReader.createDefault(interpreter)
}
loadFiles(settings)
@@ -437,20 +572,34 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
printWelcome()
repl()
- } finally {
- closeInterpreter()
- }
+ } finally closeInterpreter()
+ }
+
+ private def objClass(x: Any) = x.asInstanceOf[AnyRef].getClass
+ private def objName(x: Any) = {
+ val clazz = objClass(x)
+ val typeParams = clazz.getTypeParameters
+ val basename = clazz.getName
+ val tpString = if (typeParams.isEmpty) "" else "[%s]".format(typeParams map (_ => "_") mkString ", ")
+
+ basename + tpString
}
// injects one value into the repl; returns pair of name and class
def injectOne(name: String, obj: Any): Tuple2[String, String] = {
- val className = obj.asInstanceOf[AnyRef].getClass.getName
+ val className = objName(obj)
interpreter.quietBind(name, className, obj)
(name, className)
}
+ def injectAndName(obj: Any): Tuple2[String, String] = {
+ val name = interpreter.getVarName
+ val className = objName(obj)
+ interpreter.bind(name, className, obj)
+ (name, className)
+ }
// injects list of values into the repl; returns summary string
- def inject(args: List[Any]): String = {
+ def injectDebug(args: List[Any]): String = {
val strs =
for ((arg, i) <- args.zipWithIndex) yield {
val varName = "p" + (i + 1)
@@ -473,7 +622,7 @@ class InterpreterLoop(in0: Option[BufferedReader], out: PrintWriter) {
// if they asked for no help and command is valid, we call the real main
neededHelp() match {
case "" => if (command.ok) main(command.settings) // else nothing
- case help => out print help ; out flush
+ case help => plush(help)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/InterpreterSettings.scala b/src/compiler/scala/tools/nsc/InterpreterSettings.scala
index 99159b081d..a5e1c1e729 100644
--- a/src/compiler/scala/tools/nsc/InterpreterSettings.scala
+++ b/src/compiler/scala/tools/nsc/InterpreterSettings.scala
@@ -20,7 +20,17 @@ class InterpreterSettings(repl: Interpreter) {
* more than this number of characters, then the printout is
* truncated.
*/
- var maxPrintString = 2400
+ var maxPrintString = 800
+
+ /** The maximum number of completion candidates to print for tab
+ * completion without requiring confirmation.
+ */
+ var maxAutoprintCompletion = 250
+
+ /** String unwrapping can be disabled if it is causing issues.
+   *  Setting this to false means you will see Strings like "$iw.$iw.".
+ */
+ var unwrapStrings = true
def deprecation_=(x: Boolean) = {
val old = repl.settings.deprecation.value
@@ -30,14 +40,21 @@ class InterpreterSettings(repl: Interpreter) {
}
def deprecation: Boolean = repl.settings.deprecation.value
- override def toString =
- "InterpreterSettings {\n" +
-// " loadPath = " + loadPath + "\n" +
- " maxPrintString = " + maxPrintString + "\n" +
- "}"
-}
+ def allSettings = Map(
+ "maxPrintString" -> maxPrintString,
+ "maxAutoprintCompletion" -> maxAutoprintCompletion,
+ "unwrapStrings" -> unwrapStrings,
+ "deprecation" -> deprecation
+ )
+ private def allSettingsString =
+ allSettings.toList sortBy (_._1) map { case (k, v) => " " + k + " = " + v + "\n" } mkString
+ override def toString = """
+ | InterpreterSettings {
+ | %s
+ | }""".stripMargin.format(allSettingsString)
+}
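// With default values the new toString renders roughly as follows (exact whitespace
// aside; deprecation reflects the underlying compiler setting and is assumed off):
//
//   InterpreterSettings {
//     deprecation = false
//     maxAutoprintCompletion = 250
//     maxPrintString = 800
//     unwrapStrings = true
//   }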
/* Utilities for the InterpreterSettings class
*
@@ -48,6 +65,10 @@ object InterpreterSettings {
/** Source code for the InterpreterSettings class. This is
* used so that the interpreter is sure to have the code
* available.
+ *
+ * XXX I'm not seeing why this degree of defensiveness is necessary.
+ * If files are missing the repl's not going to work, it's not as if
+ * we have string source backups for anything else.
*/
val sourceCodeForClass =
"""
diff --git a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala
index 577439990d..eaa2970dda 100644
--- a/src/compiler/scala/tools/nsc/Main.scala
+++ b/src/compiler/scala/tools/nsc/Main.scala
@@ -7,6 +7,7 @@
package scala.tools.nsc
import java.io.File
+import File.pathSeparator
import scala.concurrent.SyncVar
@@ -14,6 +15,7 @@ import scala.tools.nsc.interactive.{ RefinedBuildManager, SimpleBuildManager }
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
import scala.tools.nsc.util.{ BatchSourceFile, FakePos } //{Position}
+import Properties.{ versionString, copyrightString, residentPromptString, msilLibPath }
/** The main class for NSC, a compiler for the programming
* language Scala.
@@ -21,10 +23,10 @@ import scala.tools.nsc.util.{ BatchSourceFile, FakePos } //{Position}
object Main extends AnyRef with EvalLoop {
val versionMsg = "Scala compiler " +
- Properties.versionString + " -- " +
- Properties.copyrightString
+ versionString + " -- " +
+ copyrightString
- val prompt = Properties.residentPromptString
+ val prompt = residentPromptString
var reporter: ConsoleReporter = _
@@ -39,7 +41,7 @@ object Main extends AnyRef with EvalLoop {
def resident(compiler: Global) {
loop { line =>
val args = line.split(' ').toList
- val command = new CompilerCommand(args, new Settings(error), error, true)
+ val command = new CompilerCommand(args, new Settings(error))
compiler.reporter.reset
new compiler.Run() compile command.files
}
@@ -48,7 +50,7 @@ object Main extends AnyRef with EvalLoop {
def process(args: Array[String]) {
val settings = new Settings(error)
reporter = new ConsoleReporter(settings)
- val command = new CompilerCommand(args.toList, settings, error, false)
+ val command = new CompilerCommand(args.toList, settings)
if (command.settings.version.value)
reporter.info(null, versionMsg, true)
else if (command.settings.Yidedebug.value) {
@@ -78,16 +80,13 @@ object Main extends AnyRef with EvalLoop {
// enter resident mode
loop { line =>
val args = line.split(' ').toList
- val command = new CompilerCommand(args.toList, settings, error, true)
+ val command = new CompilerCommand(args.toList, settings)
buildManager.update(fileSet(command.files), Set.empty)
}
} else {
- if (command.settings.target.value == "msil") {
- val libpath = System.getProperty("msil.libpath")
- if (libpath != null)
- command.settings.assemrefs.value =
- command.settings.assemrefs.value + File.pathSeparator + libpath
- }
+ if (command.settings.target.value == "msil")
+ msilLibPath foreach (x => command.settings.assemrefs.value += (pathSeparator + x))
+
try {
val compiler = if (command.settings.Yrangepos.value) new interactive.Global(command.settings, reporter)
else new Global(command.settings, reporter)
diff --git a/src/compiler/scala/tools/nsc/MainGenericRunner.scala b/src/compiler/scala/tools/nsc/MainGenericRunner.scala
index cd16ef8c8b..fdc5fe82dd 100644
--- a/src/compiler/scala/tools/nsc/MainGenericRunner.scala
+++ b/src/compiler/scala/tools/nsc/MainGenericRunner.scala
@@ -7,13 +7,14 @@
package scala.tools.nsc
-import java.io.{ File, IOException }
+import java.io.IOException
import java.lang.{ClassNotFoundException, NoSuchMethodException}
import java.lang.reflect.InvocationTargetException
import java.net.{ URL, MalformedURLException }
+import scala.tools.util.PathResolver
+import io.{ File, Process }
import util.{ ClassPath, ScalaClassLoader }
-import File.pathSeparator
import Properties.{ versionString, copyrightString }
/** An object that runs Scala code. It has three possible
@@ -21,107 +22,28 @@ import Properties.{ versionString, copyrightString }
* or interactive entry.
*/
object MainGenericRunner {
- /** Append jars found in ${scala.home}/lib to
- * a specified classpath. Also append "." if the
- * input classpath is empty; otherwise do not.
- *
- * @param classpath
- * @return the new classpath
- */
- private def addClasspathExtras(classpath: String): String = {
- val scalaHome = Properties.scalaHome
-
- def listDir(name: String): List[File] = {
- val libdir = new File(new File(scalaHome), name)
- if (!libdir.exists || libdir.isFile) Nil else libdir.listFiles.toList
- }
- lazy val jarsInLib = listDir("lib") filter (_.getName endsWith ".jar")
- lazy val dirsInClasses = listDir("classes") filter (_.isDirectory)
- val cpScala =
- if (scalaHome == null) {
- // this is to make the interpreter work when running without the scala script
- // (e.g. from eclipse). Before, "java.class.path" was added to the user classpath
- // in Settings; this was changed to match the behavior of Sun's javac.
- val javacp = System.getProperty("java.class.path")
- if (javacp == null) Nil
- else ClassPath.expandPath(javacp)
- }
- else (jarsInLib ::: dirsInClasses) map (_.toString)
-
- // either prepend existing classpath or append "."
- (if (classpath == "") cpScala ::: List(".") else classpath :: cpScala) mkString pathSeparator
- }
-
def main(args: Array[String]) {
def errorFn(str: String) = Console println str
-
- val command = new GenericRunnerCommand(args.toList, errorFn)
- val settings = command.settings
- def sampleCompiler = new Global(settings)
-
- if (!command.ok)
- return errorFn("%s\n%s".format(command.usageMsg, sampleCompiler.pluginOptionsHelp))
-
- settings.classpath.value = addClasspathExtras(settings.classpath.value)
- settings.defines.applyToCurrentJVM
-
- if (settings.version.value)
- return errorFn("Scala code runner %s -- %s".format(versionString, copyrightString))
-
- if (command.shouldStopWithInfo)
- return errorFn(command getInfoMessage sampleCompiler)
-
def exitSuccess: Nothing = exit(0)
def exitFailure(msg: Any = null): Nothing = {
if (msg != null) errorFn(msg.toString)
exit(1)
}
- def exitCond(b: Boolean): Nothing =
- if (b) exitSuccess else exitFailure(null)
-
- def fileToURL(f: File): Option[URL] =
- try { Some(f.toURI.toURL) }
- catch { case e => Console.println(e); None }
-
- def paths(str: String): List[URL] =
- for (
- file <- ClassPath.expandPath(str) map (new File(_)) if file.exists;
- val url = fileToURL(file); if !url.isEmpty
- ) yield url.get
-
- def jars(dirs: String): List[URL] =
- for (
- libdir <- ClassPath.expandPath(dirs) map (new File(_)) if libdir.isDirectory;
- jarfile <- libdir.listFiles if jarfile.isFile && jarfile.getName.endsWith(".jar");
- val url = fileToURL(jarfile); if !url.isEmpty
- ) yield url.get
-
- def specToURL(spec: String): Option[URL] =
- try { Some(new URL(spec)) }
- catch { case e: MalformedURLException => Console.println(e); None }
+ def exitCond(b: Boolean): Nothing = if (b) exitSuccess else exitFailure(null)
- def urls(specs: String): List[URL] =
- if (specs == null || specs.length == 0) Nil
- else for (
- spec <- specs.split(" ").toList;
- val url = specToURL(spec); if !url.isEmpty
- ) yield url.get
+ val command = new GenericRunnerCommand(args.toList, errorFn _)
+ import command.settings
+    def sampleCompiler = new Global(settings)   // a def so it's not created unless needed
- val classpath: List[URL] =
- paths(settings.bootclasspath.value) :::
- paths(settings.classpath.value) :::
- jars(settings.extdirs.value) :::
- urls(settings.Xcodebase.value)
-
- def createLoop(): InterpreterLoop = {
- val loop = new InterpreterLoop
- loop main settings
- loop
- }
+ if (!command.ok) return errorFn("%s\n%s".format(command.usageMsg, sampleCompiler.pluginOptionsHelp))
+ else if (settings.version.value) return errorFn("Scala code runner %s -- %s".format(versionString, copyrightString))
+ else if (command.shouldStopWithInfo) return errorFn(command getInfoMessage sampleCompiler)
def dashe = settings.execute.value
def dashi = settings.loadfiles.value
- def slurp = dashi map (file => io.File(file).slurp()) mkString "\n"
+ def slurp = dashi map (file => File(file).slurp()) mkString "\n"
+
+ val classpath: List[URL] = new PathResolver(settings) asURLs
/** Was code given in a -e argument? */
if (!settings.execute.isDefault) {
@@ -140,7 +62,10 @@ object MainGenericRunner {
exitCond(ScriptRunner.runCommand(settings, code, fullArgs))
}
else command.thingToRun match {
- case None => createLoop()
+ case None =>
+ // Questionably, we start the interpreter when there are no arguments.
+ new InterpreterLoop main settings
+
case Some(thingToRun) =>
val isObjectName =
settings.howtorun.value match {
diff --git a/src/compiler/scala/tools/nsc/MainTokenMetric.scala b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
index 0ee7ee1fe1..9c123922d6 100644
--- a/src/compiler/scala/tools/nsc/MainTokenMetric.scala
+++ b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
@@ -37,7 +37,7 @@ object MainTokenMetric {
def process(args: Array[String]) {
val settings = new Settings(error)
reporter = new ConsoleReporter(settings)
- val command = new CompilerCommand(args.toList, settings, error, false)
+ val command = new CompilerCommand(args.toList, settings)
try {
val compiler = new Global(command.settings, reporter)
tokenMetric(compiler, command.files)
diff --git a/src/compiler/scala/tools/nsc/ObjectRunner.scala b/src/compiler/scala/tools/nsc/ObjectRunner.scala
index befdf94072..210f9a1785 100644
--- a/src/compiler/scala/tools/nsc/ObjectRunner.scala
+++ b/src/compiler/scala/tools/nsc/ObjectRunner.scala
@@ -15,8 +15,7 @@ import util.ScalaClassLoader
* @author Lex Spoon
* @version 1.1, 2007/7/13
*/
-object ObjectRunner
-{
+object ObjectRunner {
/** Check whether a class with the specified name
* exists on the specified class path. */
def classExists(urls: List[URL], objectName: String): Boolean =
diff --git a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
index 2233ec269c..b011f88f2b 100644
--- a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
@@ -12,11 +12,8 @@ package scala.tools.nsc
*/
class OfflineCompilerCommand(
arguments: List[String],
- settings: Settings,
- error: String => Unit,
- interactive: Boolean)
-extends CompilerCommand(arguments, new Settings(error), error, false)
-{
+ settings: Settings)
+extends CompilerCommand(arguments, settings) {
override val cmdName = "fsc"
import settings._
diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
index edced3ad56..b3300a1ff9 100644
--- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala
+++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
@@ -12,7 +12,7 @@ import java.io.{BufferedWriter, FileWriter}
/**
* PhaseAssembly
- * Trait made to seperate the constraint solving of the phase order from
+ * Trait made to separate the constraint solving of the phase order from
* the rest of the compiler. See SIP 00002
*
*/
@@ -96,21 +96,8 @@ trait PhaseAssembly { self: Global =>
/* Given the entire graph, collect the phase objects at each level, where the phase
* names are sorted alphabetical at each level, into the compiler phase list
*/
- def compilerPhaseList(): List[SubComponent] = {
- var chain: List[SubComponent] = Nil
-
- var lvl = 1
- var nds = nodes.valuesIterator.filter(_.level == lvl).toList
- while(nds.size > 0) {
- nds = nds.sortWith((n1,n2) => (n1.phasename compareTo n2.phasename) < 0)
- for (n <- nds) {
- chain = chain ::: n.phaseobj.get
- }
- lvl += 1
- nds = nodes.valuesIterator.filter(_.level == lvl).toList
- }
- chain
- }
+ def compilerPhaseList(): List[SubComponent] =
+ nodes.values.toList filter (_.level > 0) sortBy (x => (x.level, x.phasename)) flatMap (_.phaseobj) flatten
/* Test if there are cycles in the graph, assign levels to the nodes
* and collapse hard links into nodes
@@ -167,7 +154,7 @@ trait PhaseAssembly { self: Global =>
} else if (sanity.length > 1) {
var msg = "Multiple phases want to run right after the phase " + sanity.head.to.phasename + "\n"
msg += "Phases: "
- sanity = sanity.sortWith((e1,e2) => (e1.frm.phasename compareTo e2.frm.phasename) < 0)
+ sanity = sanity sortBy (_.frm.phasename)
for (edge <- sanity) {
msg += edge.frm.phasename + ", "
}
diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala
index a4d11c2e8a..4601a1ca64 100644
--- a/src/compiler/scala/tools/nsc/Properties.scala
+++ b/src/compiler/scala/tools/nsc/Properties.scala
@@ -13,12 +13,14 @@ object Properties extends scala.util.PropertiesTrait {
protected def pickJarBasedOn = classOf[Global]
// settings based on jar properties
- val fileEndingString = prop("file.ending", ".scala|.java")
- val residentPromptString = prop("resident.prompt", "\nnsc> ")
- val shellPromptString = prop("shell.prompt", "\nscala> ")
+ def fileEndingString = scalaPropOrElse("file.ending", ".scala|.java")
+ def residentPromptString = scalaPropOrElse("resident.prompt", "\nnsc> ")
+ def shellPromptString = scalaPropOrElse("shell.prompt", "\nscala> ")
- // derived values
- val cmdName = if (isWin) "scala.bat" else "scala"
- val fileEndings = fileEndingString.split("""\|""").toList
+ // settings based on system properties
+ def msilLibPath = propOrNone("msil.libpath")
+ // derived values
+ def isEmacsShell = propOrEmpty("env.emacs") != ""
+ def fileEndings = fileEndingString.split("""\|""").toList
}
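
Turning these vals into defs means each property is looked up when it is asked for rather than frozen at class-initialization time, and helpers such as propOrNone/propOrElse/propOrEmpty keep null handling in one place. A small sketch of that helper style (the helpers below are simplified local stand-ins that read JVM system properties; the real scalaPropOrElse reads jar-based Scala properties):

    object PropsSketch {
      // Option-izing System.getProperty keeps null out of the rest of the code.
      def propOrNone(name: String): Option[String] = Option(System.getProperty(name))
      def propOrElse(name: String, alt: String): String = propOrNone(name) getOrElse alt
      def propOrEmpty(name: String): String = propOrElse(name, "")

      // As a def, this re-reads the current JVM state on every call.
      def fileEndings: List[String] =
        propOrElse("file.ending", ".scala|.java").split("""\|""").toList

      def main(args: Array[String]): Unit = {
        println(fileEndings)                 // List(.scala, .java) by default
        System.setProperty("file.ending", ".sc")
        println(fileEndings)                 // picks up the change: List(.sc)
      }
    }
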
diff --git a/src/compiler/scala/tools/nsc/ScalaDoc.scala b/src/compiler/scala/tools/nsc/ScalaDoc.scala
index f3c7c686a0..b7e416e121 100644
--- a/src/compiler/scala/tools/nsc/ScalaDoc.scala
+++ b/src/compiler/scala/tools/nsc/ScalaDoc.scala
@@ -11,7 +11,8 @@ import java.io.File
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
import scala.tools.nsc.util.FakePos //{Position}
-
+import Properties.msilLibPath
+import File.pathSeparator
/** The main class for scaladoc, a front-end for the Scala compiler
* that generates documentation from source files.
@@ -37,7 +38,7 @@ object ScalaDoc {
reporter = new ConsoleReporter(docSettings)
val command =
- new CompilerCommand(args.toList, docSettings, error, false)
+ new CompilerCommand(args.toList, docSettings)
if (!reporter.hasErrors) { // No need to continue if reading the command generated errors
@@ -56,11 +57,8 @@ object ScalaDoc {
reporter.warning(null, "Phases are restricted when using Scaladoc")
else try {
- if (docSettings.target.value == "msil") {
- val libpath = System.getProperty("msil.libpath")
- if (libpath != null)
- docSettings.assemrefs.value = docSettings.assemrefs.value + File.pathSeparator + libpath
- }
+ if (docSettings.target.value == "msil")
+ msilLibPath foreach (x => docSettings.assemrefs.value += (pathSeparator + x))
val docProcessor = new scala.tools.nsc.doc.DocFactory(reporter, docSettings)
docProcessor.document(command.files)
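
The explicit null check against System.getProperty("msil.libpath") becomes an Option (msilLibPath) plus foreach, so the append only happens when the property is present. A minimal sketch of the same move, assuming a mutable assemrefs string as a stand-in for the real setting:

    import java.io.File.pathSeparator

    object OptionOverNull {
      var assemrefs: String = "."

      def msilLibPath: Option[String] = Option(System.getProperty("msil.libpath"))

      def main(args: Array[String]): Unit = {
        // Instead of `val p = System.getProperty(...); if (p != null) ...`
        msilLibPath foreach (x => assemrefs += pathSeparator + x)
        println(assemrefs)
      }
    }
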
diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala
index 3423f8f45f..a096efb749 100644
--- a/src/compiler/scala/tools/nsc/ScriptRunner.scala
+++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala
@@ -19,6 +19,7 @@ import java.net.URL
import java.util.jar.{ JarEntry, JarOutputStream }
import java.util.regex.Pattern
+import scala.tools.util.PathResolver
import scala.tools.nsc.reporters.{Reporter,ConsoleReporter}
import scala.tools.nsc.util.{ClassPath, CompoundSourceFile, BatchSourceFile, SourceFile, SourceFileFragment}
@@ -111,7 +112,7 @@ object ScriptRunner
jar.close
}
catch {
- case _: Error => jarFile.delete() // XXX what errors to catch?
+ case _: Exception => jarFile.delete()
}
      }   // catching Exception (not Error) limits cleanup to recoverable failures
@@ -184,7 +185,7 @@ object ScriptRunner
new CompoundSourceFile(preamble, middle, end)
}
- /** Compile a script using the fsc compilation deamon.
+ /** Compile a script using the fsc compilation daemon.
*
* @param settings ...
* @param scriptFileIn ...
@@ -194,31 +195,15 @@ object ScriptRunner
settings: GenericRunnerSettings,
scriptFileIn: String): Boolean =
{
- val scriptFile = CompileClient absFileName scriptFileIn
-
- {
- import settings._
- for (setting <- List(classpath, sourcepath, bootclasspath, extdirs, outdir)) {
- // DBG("%s = %s".format(setting.name, setting.value))
- setting.value = CompileClient absFileName setting.value
- }
- }
-
- val compSettingNames = new Settings(error).allSettings map (_.name)
- val compSettings = settings.allSettings filter (compSettingNames contains _.name)
+ val scriptFile = Path(scriptFileIn).toAbsolute.path
+ val compSettingNames = new Settings(error).visibleSettings.toList map (_.name)
+ val compSettings = settings.visibleSettings.toList filter (compSettingNames contains _.name)
val coreCompArgs = compSettings flatMap (_.unparse)
val compArgs = coreCompArgs ::: List("-Xscript", scriptMain(settings), scriptFile)
var compok = true
- // XXX temporary as I started using ManagedResource not remembering it wasn't checked in.
- def ManagedResource[T](x: => T) = Some(x)
-
- for {
- socket <- ManagedResource(CompileSocket getOrCreateSocket "")
- val _ = if (socket == null) return false
- out <- ManagedResource(new PrintWriter(socket.getOutputStream(), true))
- in <- ManagedResource(new BufferedReader(new InputStreamReader(socket.getInputStream())))
- } {
+ val socket = CompileSocket getOrCreateSocket "" getOrElse (return false)
+ socket.applyReaderAndWriter { (in, out) =>
out println (CompileSocket getPassword socket.getPort)
out println (compArgs mkString "\0")
@@ -227,8 +212,7 @@ object ScriptRunner
if (CompileSocket.errorPattern matcher fromServer matches)
compok = false
}
- // XXX temp until managed resource is available
- in.close() ; out.close() ; socket.close()
+ socket.close()
}
compok
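
The hand-rolled ManagedResource shim and the trailing in.close(); out.close(); socket.close() give way to applyReaderAndWriter, a loan-style method on the socket wrapper that hands the caller a reader and writer and manages their lifetime. A generic sketch of that pattern over a plain java.net.Socket (the withReaderAndWriter name is illustrative, not the compiler's API):

    import java.io.{ BufferedReader, InputStreamReader, PrintWriter }
    import java.net.Socket

    object LoanPattern {
      // Lend a reader/writer pair to `body`, closing both however body exits.
      def withReaderAndWriter[T](socket: Socket)(body: (BufferedReader, PrintWriter) => T): T = {
        val in  = new BufferedReader(new InputStreamReader(socket.getInputStream))
        val out = new PrintWriter(socket.getOutputStream, true)
        try body(in, out)
        finally {
          in.close()
          out.close()
        }
      }

      // Usage sketch: send one line, collect replies until the server closes.
      def talk(socket: Socket, line: String): List[String] =
        withReaderAndWriter(socket) { (in, out) =>
          out println line
          Iterator.continually(in.readLine()).takeWhile(_ != null).toList
        }
    }
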
@@ -240,7 +224,7 @@ object ScriptRunner
/** Compile a script and then run the specified closure with
* a classpath for the compiled script.
*
- * @returns true if compilation and the handler succeeds, false otherwise.
+ * @return true if compilation and the handler succeed, false otherwise.
*/
private def withCompiledScript(
settings: GenericRunnerSettings,
@@ -300,27 +284,15 @@ object ScriptRunner
/** Run a script after it has been compiled
*
- * @returns true if execution succeeded, false otherwise
+ * @return true if execution succeeded, false otherwise
*/
private def runCompiled(
settings: GenericRunnerSettings,
compiledLocation: String,
scriptArgs: List[String]): Boolean =
{
- def fileToURL(f: JFile): Option[URL] =
- try Some(f.toURI.toURL) catch { case _: Exception => None }
-
- def paths(str: String, expandStar: Boolean): List[URL] =
- for {
- file <- ClassPath.expandPath(str, expandStar) map (new JFile(_))
- if file.exists
- url <- fileToURL(file)
- } yield url
-
- val classpath =
- (paths(settings.bootclasspath.value, true) :::
- paths(compiledLocation, false) :::
- paths(settings.classpath.value, true))
+ val pr = new PathResolver(settings)
+ val classpath = pr.asURLs :+ File(compiledLocation).toURL
try {
ObjectRunner.run(
@@ -342,7 +314,7 @@ object ScriptRunner
/** Run a script file with the specified arguments and compilation
* settings.
*
- * @returns true if compilation and execution succeeded, false otherwise.
+ * @return true if compilation and execution succeeded, false otherwise.
*/
def runScript(
settings: GenericRunnerSettings,
@@ -357,7 +329,7 @@ object ScriptRunner
/** Run a command
*
- * @returns true if compilation and execution succeeded, false otherwise.
+ * @return true if compilation and execution succeeded, false otherwise.
*/
def runCommand(
settings: GenericRunnerSettings,
@@ -366,7 +338,7 @@ object ScriptRunner
{
val scriptFile = File.makeTemp("scalacmd", ".scala")
// save the command to the file
- scriptFile writeAll List(command)
+ scriptFile writeAll command
try withCompiledScript(settings, scriptFile.path) { runCompiled(settings, _, scriptArgs) }
finally scriptFile.delete() // in case there was a compilation error
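
runCompiled no longer assembles its classpath by hand from the boot classpath, the compiled location, and the user classpath; it asks PathResolver for the URLs and appends the compiled script's own location. A minimal sketch of the string-to-URL conversion that this subsumes, skipping entries that do not exist (the asURLs function below is a local illustration, not the PathResolver method):

    import java.io.File
    import java.net.URL

    object ClasspathUrls {
      // Expand a pathSeparator-delimited string into URLs for existing entries only.
      def asURLs(path: String): List[URL] =
        path.split(File.pathSeparator).toList
          .map(new File(_))
          .filter(_.exists)
          .map(_.toURI.toURL)

      def main(args: Array[String]): Unit = {
        val cp = asURLs(System.getProperty("java.class.path")) :+
                 new File(".").toURI.toURL   // the "compiled location" appended last
        cp foreach println
      }
    }
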
diff --git a/src/compiler/scala/tools/nsc/Settings.scala b/src/compiler/scala/tools/nsc/Settings.scala
index 157bc2dc1f..cd4626c0c6 100644
--- a/src/compiler/scala/tools/nsc/Settings.scala
+++ b/src/compiler/scala/tools/nsc/Settings.scala
@@ -6,945 +6,10 @@
package scala.tools.nsc
-import java.io.File
-import io.AbstractFile
-import util.SourceFile
-import Settings._
-import annotation.elidable
+import settings.MutableSettings
-class Settings(errorFn: String => Unit) extends ScalacSettings {
+/** A compatibility stub.
+ */
+class Settings(errorFn: String => Unit) extends MutableSettings(errorFn) {
def this() = this(Console.println)
-
- // optionizes a system property
- private def syspropopt(name: String): Option[String] = onull(System.getProperty(name))
- private def sysenvopt(name: String): Option[String] = onull(System.getenv(name))
-
- // given any number of possible path segments, flattens down to a
- // :-separated style path
- private def concatPath(segments: Option[String]*): String =
- segments.toList.flatMap(x => x) mkString File.pathSeparator
-
- protected def classpathDefault =
- sysenvopt("CLASSPATH") getOrElse "."
-
- protected def bootclasspathDefault =
- concatPath(syspropopt("sun.boot.class.path"), guessedScalaBootClassPath)
- // syspropopt("sun.boot.class.path") getOrElse ""
- // XXX scala-library.jar was being added to both boot and regular classpath until 8/18/09
- // Removing from boot classpath caused build/quick/bin/scala to fail.
- // Note to self, figure out how/why the bootclasspath is tied up with the locker/quick/pack.
-
- protected def extdirsDefault =
- concatPath(syspropopt("java.ext.dirs"), guessedScalaExtDirs)
-
- protected def assemExtdirsDefault =
- concatPath(guessedScalaExtDirs)
-
- protected def pluginsDirDefault =
- guess(List("misc", "scala-devel", "plugins"), _.isDirectory) getOrElse ""
-
- def onull[T <: AnyRef](x: T): Option[T] = if (x eq null) None else Some(x)
- def mkPath(base: String, segments: String*) = new File(base, segments.mkString(File.separator))
- def scalaHome: Option[String] = onull(Properties.scalaHome)
-
- // examine path relative to scala home and return Some(path) if it meets condition
- private def guess(xs: List[String], cond: (File) => Boolean): Option[String] = {
- if (scalaHome.isEmpty) return None
- val f = mkPath(scalaHome.get, xs: _*)
- if (cond(f)) Some(f.getAbsolutePath) else None
- }
-
- private def guessedScalaBootClassPath: Option[String] =
- guess(List("lib", "scala-library.jar"), _.isFile) orElse
- guess(List("classes", "library"), _.isDirectory)
-
- private def guessedScalaExtDirs: Option[String] =
- guess(List("lib"), _.isDirectory)
-
- override def hashCode() = allSettings.hashCode
- override def equals(that: Any) = that match {
- case s: Settings => this.allSettings == s.allSettings
- case _ => false
- }
-
- def checkDependencies: Boolean = {
- def hasValue(s: Setting, value: String): Boolean = s match {
- case bs: BooleanSetting => bs.value
- case ss: StringSetting => ss.value == value
- case cs: ChoiceSetting => cs.value == value
- case _ => "" == value
- }
-
- for (setting <- allSettings ; (dep, value) <- setting.dependency)
- if (!setting.isDefault && !hasValue(dep, value)) {
- errorFn("incomplete option " + setting.name + " (requires " + dep.name + ")")
- return false
- }
-
- true
- }
-
-
- /** A list pairing source directories with their output directory.
- * This option is not available on the command line, but can be set by
- * other tools (IDEs especially). The command line specifies a single
- * output directory that is used for all source files, denoted by a
- * '*' in this list.
- */
- lazy val outputDirs = new OutputDirs
-
- /**
- * Split command line parameters by space, properly process quoted parameter
- */
- def splitParams(line: String): List[String] = {
- def parse(from: Int, i: Int, args: List[String]): List[String] = {
- if (i < line.length) {
- line.charAt(i) match {
- case ' ' =>
- val args1 = fetchArg(from, i) :: args
- val j = skipS(i + 1)
- if (j >= 0) {
- parse(j, j, args1)
- } else args1
- case '"' =>
- val j = skipTillQuote(i + 1)
- if (j > 0) {
- parse(from, j + 1, args)
- } else {
- errorFn("Parameters '" + line + "' with unmatched quote at " + i + ".")
- Nil
- }
- case _ => parse(from, i + 1, args)
- }
- } else { // done
- if (i > from) {
- fetchArg(from, i) :: args
- } else args
- }
- }
-
- def fetchArg(from: Int, until: Int) = {
- if (line.charAt(from) == '"') {
- line.substring(from + 1, until - 1)
- } else {
- line.substring(from, until)
- }
- }
-
- def skipTillQuote(i: Int): Int = {
- if (i < line.length) {
- line.charAt(i) match {
- case '"' => i
- case _ => skipTillQuote(i + 1)
- }
- } else -1
- }
-
- def skipS(i: Int): Int = {
- if (i < line.length) {
- line.charAt(i) match {
- case ' ' => skipS(i + 1)
- case _ => i
- }
- } else -1
- }
-
- // begin split
- val j = skipS(0)
- if (j >= 0) {
- parse(j, j, Nil).reverse
- } else Nil
- }
-
- def parseParams(args: List[String]): List[String] = {
- // verify command exists and call setter
- def tryToSetIfExists(
- cmd: String,
- args: List[String],
- setter: (Setting) => (List[String] => Option[List[String]])
- ): Option[List[String]] =
- lookupSetting(cmd) match {
- case None => errorFn("Parameter '" + cmd + "' is not recognised by Scalac.") ; None
- case Some(cmd) =>
- val res = setter(cmd)(args)
- cmd.postSetHook()
- res
- }
-
- // if arg is of form -Xfoo:bar,baz,quux
- def parseColonArg(s: String): Option[List[String]] = {
- val idx = s indexWhere (_ == ':')
- val (p, args) = (s.substring(0, idx), s.substring(idx+1).split(",").toList)
-
- // any non-Nil return value means failure and we return s unmodified
- tryToSetIfExists(p, args, (s: Setting) => s.tryToSetColon _)
- }
- // if arg is of form -Dfoo=bar or -Dfoo (name = "-D")
- def isPropertyArg(s: String) = lookupSetting(s.substring(0, 2)) match {
- case Some(x: DefinesSetting) => true
- case _ => false
- }
- def parsePropertyArg(s: String): Option[List[String]] = {
- val (p, args) = (s.substring(0, 2), s.substring(2))
-
- tryToSetIfExists(p, List(args), (s: Setting) => s.tryToSetProperty _)
- }
-
- // if arg is of form -Xfoo or -Xfoo bar (name = "-Xfoo")
- def parseNormalArg(p: String, args: List[String]): Option[List[String]] =
- tryToSetIfExists(p, args, (s: Setting) => s.tryToSet _)
-
- def doArgs(args: List[String]): List[String] = {
- if (args.isEmpty) return Nil
- val arg :: rest = args
- if (arg == "") {
- // it looks like Ant passes "" sometimes
- rest
- }
- else if (!arg.startsWith("-")) {
- errorFn("Argument '" + arg + "' does not start with '-'.")
- args
- }
- else if (arg == "-") {
- errorFn("'-' is not a valid argument.")
- args
- }
- else
- // we dispatch differently based on the appearance of p:
- // 1) If it has a : it is presumed to be -Xfoo:bar,baz
- // 2) If the first two chars are the name of a command, -Dfoo=bar
- // 3) Otherwise, the whole string should be a command name
- //
- // Internally we use Option[List[String]] to discover error,
- // but the outside expects our arguments back unchanged on failure
- if (arg contains ":") parseColonArg(arg) match {
- case Some(_) => rest
- case None => args
- }
- else if (isPropertyArg(arg)) parsePropertyArg(arg) match {
- case Some(_) => rest
- case None => args
- }
- else parseNormalArg(arg, rest) match {
- case Some(xs) => xs
- case None => args
- }
- }
-
- doArgs(args)
- }
-
- // checks both name and any available abbreviations
- def lookupSetting(cmd: String): Option[Setting] =
- settingSet.find(x => x.name == cmd || (x.abbreviations contains cmd))
-
- // The *Setting classes used to be case classes defined inside of Settings.
- // The choice of location was poor because it tied the type of each setting
- // to its enclosing instance, which broke equality, so I moved the class
- // definitions into the companion object. The one benefit it was getting
- // out of this was using its knowledge of the enclosing instance to add
- // itself to the list of settings in the Setting constructor. However,
- // this was dicey and not working predictably, as illustrated in the comment
- // in GenericRunnerSettings:
- //
- // For some reason, "object defines extends Setting(...)"
- // does not work here. The object is present but the setting
- // is not added to allsettings.
- //
- // To capture similar semantics, I created instance methods on setting
- // which call a factory method for the right kind of object and then add
- // the newly constructed instance to allsettings. The constructors are
- // private to force all creation to go through these methods.
- //
- // The usage of case classes was becoming problematic (due to custom
- // equality, case class inheritance, and the need to control object
- // creation without a synthetic apply method getting in the way) and
- // it was providing little benefit, so they are no longer cases.
-
- // a wrapper for all Setting creators to keep our list up to date
- // and tell them how to announce errors
- private def add[T <: Setting](s: T): T = {
- s setErrorHandler errorFn
- allsettings += s
- s
- }
-
- /**
- * The canonical creators for Setting objects.
- */
- import Function.{ untupled }
- import Setting._
-
- // A bit too clever, but I haven't found any other way to compose
- // functions with arity 2+ without having to annotate parameter types
- lazy val IntSetting = untupled((sint _).tupled andThen add[IntSetting])
- lazy val BooleanSetting = untupled((bool _).tupled andThen add[BooleanSetting])
- lazy val StringSetting = untupled((str _).tupled andThen add[StringSetting])
- lazy val MultiStringSetting = untupled((multi _).tupled andThen add[MultiStringSetting])
- lazy val ChoiceSetting = untupled((choice _).tupled andThen add[ChoiceSetting])
- lazy val DebugSetting = untupled((sdebug _).tupled andThen add[DebugSetting])
- lazy val PhasesSetting = untupled((phase _).tupled andThen add[PhasesSetting])
- lazy val DefinesSetting = add(defines())
- lazy val OutputSetting = untupled((output _).tupled andThen add[OutputSetting])
-
- override def toString() =
- "Settings(\n%s)" format (settingSet filter (s => !s.isDefault) map (" " + _ + "\n") mkString)
-}
-
-object Settings {
- // basically this is a value which remembers if it's been modified
- trait SettingValue {
- type T <: Any
- protected var v: T
- private var setByUser: Boolean = false
- def isDefault: Boolean = !setByUser
- def value: T = v
- def value_=(arg: T) = { setByUser = true ; v = arg }
- val choices : List[T] = Nil
- }
-
- /** A class for holding mappings from source directories to
- * their output location. This functionality can be accessed
- * only programmatically. The command line compiler uses a
- * single output location, but tools may use this functionality
- * to set output location per source directory.
- */
- class OutputDirs {
- /** Pairs of source directory - destination directory. */
- private var outputDirs: List[(AbstractFile, AbstractFile)] = Nil
-
- /** If this is not None, the output location where all
- * classes should go.
- */
- private var singleOutDir: Option[AbstractFile] = None
-
- /** Add a destination directory for sources found under srcdir.
- * Both directories should exits.
- */
- def add(srcDir: String, outDir: String): Unit =
- add(checkDir(AbstractFile.getDirectory(srcDir), srcDir),
- checkDir(AbstractFile.getDirectory(outDir), outDir))
-
- /** Check that dir is exists and is a directory. */
- private def checkDir(dir: AbstractFile, name: String): AbstractFile = {
- if ((dir eq null) || !dir.isDirectory)
- throw new FatalError(name + " does not exist or is not a directory")
- dir
- }
-
- /** Set the single output directory. From now on, all files will
- * be dumped in there, regardless of previous calls to 'add'.
- */
- def setSingleOutput(outDir: String) {
- val dst = AbstractFile.getDirectory(outDir)
- setSingleOutput(checkDir(dst, outDir))
- }
-
- /** Set the single output directory. From now on, all files will
- * be dumped in there, regardless of previous calls to 'add'.
- */
- def setSingleOutput(dir: AbstractFile) {
- singleOutDir = Some(dir)
- }
-
- def add(src: AbstractFile, dst: AbstractFile) {
- singleOutDir = None
- outputDirs ::= (src, dst)
- }
-
- /** Return the list of source-destination directory pairs. */
- def outputs: List[(AbstractFile, AbstractFile)] = outputDirs
-
- /** Return the output directory for the given file.
- */
- def outputDirFor(src: AbstractFile): AbstractFile = {
- def isBelow(srcDir: AbstractFile, outDir: AbstractFile) =
- src.path.startsWith(srcDir.path)
-
- singleOutDir match {
- case Some(d) => d
- case None =>
- (outputs find (isBelow _).tupled) match {
- case Some((_, d)) => d
- case _ =>
- throw new FatalError("Could not find an output directory for "
- + src.path + " in " + outputs)
- }
- }
- }
-
- /** Return the source file path(s) which correspond to the given
- * classfile path and SourceFile attribute value, subject to the
- * condition that source files are arranged in the filesystem
- * according to Java package layout conventions.
- *
- * The given classfile path must be contained in at least one of
- * the specified output directories. If it does not then this
- * method returns Nil.
- *
- * Note that the source file is not required to exist, so assuming
- * a valid classfile path this method will always return a list
- * containing at least one element.
- *
- * Also that if two or more source path elements target the same
- * output directory there will be two or more candidate source file
- * paths.
- */
- def srcFilesFor(classFile : AbstractFile, srcPath : String) : List[AbstractFile] = {
- def isBelow(srcDir: AbstractFile, outDir: AbstractFile) =
- classFile.path.startsWith(outDir.path)
-
- singleOutDir match {
- case Some(d) => Nil
- case None =>
- (outputs filter (isBelow _).tupled) match {
- case Nil => Nil
- case matches => matches.map(_._1.lookupPathUnchecked(srcPath, false))
- }
- }
- }
- }
-
- // The Setting companion object holds all the factory methods
- object Setting {
- def bool(name: String, descr: String) =
- new BooleanSetting(name, descr)
-
- def str(name: String, arg: String, descr: String, default: String) =
- new StringSetting(name, arg, descr, default)
-
- def sint(
- name: String,
- descr: String,
- default: Int,
- range: Option[(Int, Int)] = None,
- parser: String => Option[Int] = _ => None
- ) =
- new IntSetting(name, descr, default, range, parser)
-
- def multi(name: String, arg: String, descr: String) =
- new MultiStringSetting(name, arg, descr)
-
- def choice(name: String, descr: String, choices: List[String], default: String): ChoiceSetting =
- new ChoiceSetting(name, descr, choices, default)
-
- def sdebug(name: String, descr: String, choices: List[String], default: String, defaultEmpty: String) =
- new DebugSetting(name, descr, choices, default, defaultEmpty)
-
- def phase(name: String, descr: String) =
- new PhasesSetting(name, descr)
-
- def defines() = new DefinesSetting()
-
- def output(outputDirs: OutputDirs, default: String) =
- new OutputSetting(outputDirs, default)
- }
-
- implicit val SettingOrdering : Ordering[Setting] = Ordering.ordered;
- /** A base class for settings of all types.
- * Subclasses each define a `value' field of the appropriate type.
- */
- abstract class Setting(descr: String) extends Ordered[Setting] with SettingValue {
- /** The name of the option as written on the command line, '-' included. */
- def name: String
-
- /** Error handling function, set after creation by enclosing Settings instance */
- private var _errorFn: String => Unit = _
- private[Settings] def setErrorHandler(e: String => Unit) = _errorFn = e
- def errorFn(msg: String) = _errorFn(msg)
- def errorAndValue[T](msg: String, x: T): T = { errorFn(msg) ; x }
-
- /** Will be called after this Setting is set, for any cases where the
- * Setting wants to perform extra work. */
- private var _postSetHook: () => Unit = () => ()
- def postSetHook(): Unit = _postSetHook()
- def withPostSetHook(f: () => Unit): this.type = { _postSetHook = f ; this }
-
- /** After correct Setting has been selected, tryToSet is called with the
- * remainder of the command line. It consumes any applicable arguments and
- * returns the unconsumed ones.
- */
- private[Settings] def tryToSet(args: List[String]): Option[List[String]]
-
- /** Commands which can take lists of arguments in form -Xfoo:bar,baz override
- * this method and accept them as a list. It returns List[String] for
- * consistency with tryToSet, and should return its incoming arguments
- * unmodified on failure, and Nil on success.
- */
- private[Settings] def tryToSetColon(args: List[String]): Option[List[String]] =
- errorAndValue("'" + name + "' does not accept multiple arguments", None)
-
- /** Commands which take properties in form -Dfoo=bar or -Dfoo
- */
- private[Settings] def tryToSetProperty(args: List[String]): Option[List[String]] =
- errorAndValue("'" + name + "' does not accept property style arguments", None)
-
- /**
- * Attempt to set from a properties file style property value.
- */
- def tryToSetFromPropertyValue(s : String) {
- tryToSet(s :: Nil)
- }
-
- /** The syntax defining this setting in a help string */
- private var _helpSyntax = name
- def helpSyntax: String = _helpSyntax
- def withHelpSyntax(s: String): this.type = { _helpSyntax = s ; this }
-
- /** Abbreviations for this setting */
- private var _abbreviations: List[String] = Nil
- def abbreviations = _abbreviations
- def withAbbreviation(s: String): this.type = { _abbreviations ++= List(s) ; this }
-
- /** A description of the purpose of this setting in a help string */
- def helpDescription = descr
-
- /** A list of Strings which can recreate this setting. */
- def unparse: List[String]
-
- /** Optional dependency on another setting */
- protected[Settings] var dependency: Option[(Setting, String)] = None
- def dependsOn(s: Setting, value: String): this.type = { dependency = Some((s, value)); this }
- def dependsOn(s: Setting): this.type = dependsOn(s, "")
-
- def isStandard: Boolean = !isFscSpecific && !isAdvanced && !isPrivate && name != "-Y"
- def isFscSpecific: Boolean = (name == "-shutdown")
- def isAdvanced: Boolean = (name startsWith "-X") && name != "-X"
- def isPrivate: Boolean = (name == "-P") || ((name startsWith "-Y") && name != "-Y")
-
- // Ordered (so we can use TreeSet)
- def compare(that: Setting): Int = name compare that.name
- def compareLists[T <% Ordered[T]](xs: List[T], ys: List[T]): Boolean =
- xs.sortWith(_ < _) == ys.sortWith(_ < _)
-
- // Equality
- def eqValues: List[Any] = List(name, value)
- def isEq(other: Setting) = eqValues == other.eqValues
- override def hashCode() = name.hashCode
- override def toString() = "%s = %s".format(name, value)
- }
-
- /** A setting represented by an integer */
- class IntSetting private[Settings](
- val name: String,
- val descr: String,
- val default: Int,
- val range: Option[(Int, Int)],
- parser: String => Option[Int])
- extends Setting(descr) {
- type T = Int
- protected var v = default
-
- // not stable values!
- val IntMin = Int.MinValue
- val IntMax = Int.MaxValue
- def min = range map (_._1) getOrElse IntMin
- def max = range map (_._2) getOrElse IntMax
-
- override def value_=(s: Int) =
- if (isInputValid(s)) super.value_=(s) else errorMsg
-
- // Validate that min and max are consistent
- assert(min <= max)
-
- // Helper to validate an input
- private def isInputValid(k: Int): Boolean = (min <= k) && (k <= max)
-
- // Helper to generate a textual explaination of valid inputs
- private def getValidText: String = (min, max) match {
- case (IntMin, IntMax) => "can be any integer"
- case (IntMin, x) => "must be less than or equal to "+x
- case (x, IntMax) => "must be greater than or equal to "+x
- case _ => "must be between %d and %d".format(min, max)
- }
-
- // Ensure that the default value is actually valid
- assert(isInputValid(default))
-
- def parseArgument(x: String): Option[Int] = {
- parser(x) orElse {
- try { Some(x.toInt) }
- catch { case _: NumberFormatException => None }
- }
- }
-
- def errorMsg = errorFn("invalid setting for -"+name+" "+getValidText)
-
- def tryToSet(args: List[String]) =
- if (args.isEmpty) errorAndValue("missing argument", None)
- else parseArgument(args.head) match {
- case Some(i) => value = i ; Some(args.tail)
- case None => errorMsg ; None
- }
-
- def unparse: List[String] =
- if (value == default) Nil
- else List(name, value.toString)
-
- override def equals(that: Any) = that match {
- case x: IntSetting => this isEq x
- case _ => false
- }
- }
-
- /** A setting represented by a boolean flag (false, unless set) */
- class BooleanSetting private[Settings](
- val name: String,
- val descr: String)
- extends Setting(descr) {
- type T = Boolean
- protected var v = false
-
- def tryToSet(args: List[String]) = { value = true ; Some(args) }
- def unparse: List[String] = if (value) List(name) else Nil
- override def tryToSetFromPropertyValue(s : String) {
- value = s.equalsIgnoreCase("true")
- }
- override def equals(that: Any) = that match {
- case x: BooleanSetting => this isEq x
- case _ => false
- }
- }
-
- /** A setting represented by a string, (`default' unless set) */
- class StringSetting private[Settings](
- val name: String,
- val arg: String,
- val descr: String,
- val default: String)
- extends Setting(descr) {
- type T = String
- protected var v = default
-
- def tryToSet(args: List[String]) = args match {
- case Nil => errorAndValue("missing argument", None)
- case x :: xs => value = x ; Some(xs)
- }
- def unparse: List[String] = if (value == default) Nil else List(name, value)
-
- withHelpSyntax(name + " <" + arg + ">")
-
- override def equals(that: Any) = that match {
- case x: StringSetting => this isEq x
- case _ => false
- }
- }
-
- /** Set the output directory. */
- class OutputSetting private[Settings](
- outputDirs: OutputDirs,
- default: String)
- extends StringSetting("-d", "directory", "Specify where to place generated class files", default) {
- value = default
- override def value_=(str: String) {
- super.value_=(str)
- outputDirs.setSingleOutput(str)
- }
- }
-
- /** A setting that accumulates all strings supplied to it,
- * until it encounters one starting with a '-'. */
- class MultiStringSetting private[Settings](
- val name: String,
- val arg: String,
- val descr: String)
- extends Setting(descr) {
- type T = List[String]
- protected var v: List[String] = Nil
- def appendToValue(str: String) { value ++= List(str) }
-
- def tryToSet(args: List[String]) = {
- val (strings, rest) = args span (x => !x.startsWith("-"))
- strings foreach appendToValue
-
- Some(rest)
- }
- override def tryToSetColon(args: List[String]) = tryToSet(args)
- def unparse: List[String] = value map { name + ":" + _ }
-
- withHelpSyntax(name + ":<" + arg + ">")
- override def equals(that: Any) = that match {
- case x: MultiStringSetting => this isEq x
- case _ => false
- }
- }
-
- /** A setting represented by a string in a given set of <code>choices</code>,
- * (<code>default</code> unless set).
- */
- class ChoiceSetting private[Settings](
- val name: String,
- val descr: String,
- override val choices: List[String],
- val default: String)
- extends Setting(descr + choices.mkString(" (", ",", ")")) {
- type T = String
- protected var v: String = default
- protected def argument: String = name.substring(1)
-
- def tryToSet(args: List[String]) = { value = default ; Some(args) }
- override def tryToSetColon(args: List[String]) = args match {
- case Nil => errorAndValue("missing " + argument, None)
- case List(x) if choices contains x => value = x ; Some(Nil)
- case List(x) => errorAndValue("'" + x + "' is not a valid choice for '" + name + "'", None)
- case xs => errorAndValue("'" + name + "' does not accept multiple arguments.", None)
- }
- def unparse: List[String] =
- if (value == default) Nil else List(name + ":" + value)
-
- withHelpSyntax(name + ":<" + argument + ">")
- override def equals(that: Any) = that match {
- case x: ChoiceSetting => this isEq x
- case _ => false
- }
- }
-
- /** Same as ChoiceSetting but have a <code>level</code> int which tells the
- * index of the selected choice. The <code>defaultEmpty</code> is used when
- * this setting is used without specifying any of the available choices.
- */
- class DebugSetting private[Settings](
- name: String,
- descr: String,
- choices: List[String],
- default: String,
- val defaultEmpty: String)
- extends ChoiceSetting(name, descr, choices, default) {
- def indexOf[T](xs: List[T], e: T): Option[Int] = xs.indexOf(e) match {
- case -1 => None
- case x => Some(x)
- }
- var level: Int = indexOf(choices, default).get
-
- override def value_=(choice: String) = {
- super.value_=(choice)
- level = indexOf(choices, choice).get
- }
-
- override def tryToSet(args: List[String]) =
- if (args.isEmpty) { value = defaultEmpty ; Some(Nil) }
- else super.tryToSet(args)
- override def equals(that: Any) = that match {
- case x: DebugSetting => this isEq x
- case _ => false
- }
- }
-
- /** A setting represented by a list of strings which should be prefixes of
- * phase names. This is not checked here, however. Alternatively the string
- * "all" can be used to represent all phases.
- * (the empty list, unless set)
- */
- class PhasesSetting private[Settings](
- val name: String,
- val descr: String)
- extends Setting(descr + " <phase> or \"all\"") {
- type T = List[String]
- protected var v: List[String] = Nil
-
- def tryToSet(args: List[String]) = errorAndValue("missing phase", None)
- override def tryToSetColon(args: List[String]) = args match {
- case Nil => errorAndValue("missing phase", None)
- case xs => value ++= xs ; Some(Nil)
- }
- // we slightly abuse the usual meaning of "contains" here by returning
- // true if our phase list contains "all", regardless of the incoming argument
- def contains(phasename: String): Boolean =
- doAllPhases || (value exists { phasename startsWith _ } )
-
- def doAllPhases() = value contains "all"
- def unparse: List[String] = value map { name + ":" + _ }
-
- override def equals(that: Any) = that match {
- case ps: PhasesSetting if name == ps.name =>
- (doAllPhases && ps.doAllPhases) || compareLists(value, ps.value)
- case _ => false
- }
-
- withHelpSyntax(name + ":<phase>")
- }
-
- /** A setting for a -D style property definition */
- class DefinesSetting private[Settings] extends Setting("set a Java property") {
- type T = List[(String, String)]
- protected var v: T = Nil
- def name = "-D"
- withHelpSyntax(name + "<prop>")
-
- // given foo=bar returns Some(foo, bar), or None if parse fails
- def parseArg(s: String): Option[(String, String)] = {
- if (s == "") return None
- val regexp = """^(.*)?=(.*)$""".r
-
- regexp.findAllIn(s).matchData.toList match {
- case Nil => Some(s, "")
- case List(md) => md.subgroups match { case List(a,b) => Some(a,b) }
- }
- }
-
- def tryToSet(args: List[String]) =
- if (args.isEmpty) None
- else parseArg(args.head) match {
- case None => None
- case Some((a, b)) => value ++= List((a, b)) ; Some(args.tail)
- }
-
- /** Apply the specified properties to the current JVM */
- def applyToCurrentJVM =
- value foreach { case (k, v) => System.getProperties.setProperty(k, v) }
-
- def unparse: List[String] =
- value map { case (k,v) => "-D" + k + (if (v == "") "" else "=" + v) }
- override def equals(that: Any) = that match {
- case x: DefinesSetting => this isEq x
- case _ => false
- }
- }
-
-}
-
-trait ScalacSettings {
- self: Settings =>
-
- import collection.immutable.TreeSet
-
- /** A list of all settings */
- protected var allsettings: Set[Setting] = TreeSet[Setting]()
- def settingSet: Set[Setting] = allsettings
- def allSettings: List[Setting] = settingSet.toList
-
- /** Disable a setting */
- def disable(s: Setting) = allsettings -= s
-
- /**
- * Temporary Settings
- */
- val suppressVTWarn = BooleanSetting ("-Ysuppress-vt-typer-warnings", "Suppress warnings from the typer when testing the virtual class encoding, NOT FOR FINAL!")
-
- /**
- * Standard settings
- */
- // argfiles is only for the help message
- val argfiles = BooleanSetting ("@<file>", "A text file containing compiler arguments (options and source files)")
- val bootclasspath = StringSetting ("-bootclasspath", "path", "Override location of bootstrap class files", bootclasspathDefault)
- val classpath = StringSetting ("-classpath", "path", "Specify where to find user class files", classpathDefault).withAbbreviation("-cp")
- val outdir = OutputSetting (outputDirs, ".")
- val dependenciesFile = StringSetting ("-dependencyfile", "file", "Specify the file in which dependencies are tracked", ".scala_dependencies")
- val deprecation = BooleanSetting ("-deprecation", "Output source locations where deprecated APIs are used")
- val encoding = StringSetting ("-encoding", "encoding", "Specify character encoding used by source files", Properties.sourceEncoding)
- val explaintypes = BooleanSetting ("-explaintypes", "Explain type errors in more detail")
- val extdirs = StringSetting ("-extdirs", "dirs", "Override location of installed extensions", extdirsDefault)
- val debuginfo = DebugSetting ("-g", "Specify level of generated debugging info", List("none", "source", "line", "vars", "notailcalls"), "vars", "vars")
- val help = BooleanSetting ("-help", "Print a synopsis of standard options")
- val make = ChoiceSetting ("-make", "Specify recompilation detection strategy", List("all", "changed", "immediate", "transitive", "transitivenocp"), "all") .
- withHelpSyntax("-make:<strategy>")
- val nowarnings = BooleanSetting ("-nowarn", "Generate no warnings")
- val XO = BooleanSetting ("-optimise", "Generates faster bytecode by applying optimisations to the program").withAbbreviation("-optimize")
- val printLate = BooleanSetting ("-print", "Print program with all Scala-specific features removed")
- val sourcepath = StringSetting ("-sourcepath", "path", "Specify where to find input source files", "")
- val target = ChoiceSetting ("-target", "Specify for which target object files should be built", List("jvm-1.5", "msil"), "jvm-1.5")
- val unchecked = BooleanSetting ("-unchecked", "Enable detailed unchecked warnings")
- val uniqid = BooleanSetting ("-uniqid", "Print identifiers with unique names for debugging")
- val verbose = BooleanSetting ("-verbose", "Output messages about what the compiler is doing")
- val version = BooleanSetting ("-version", "Print product version and exit")
-
- /**
- * -X "Advanced" settings
- */
- val Xhelp = BooleanSetting ("-X", "Print a synopsis of advanced options")
- val assemname = StringSetting ("-Xassem-name", "file", "Name of the output assembly (only relevant with -target:msil)", "").dependsOn(target, "msil")
- val assemrefs = StringSetting ("-Xassem-path", "path", "List of assemblies referenced by the program (only relevant with -target:msil)", ".").dependsOn(target, "msil")
- val assemextdirs = StringSetting ("-Xassem-extdirs", "dirs", "List of directories containing assemblies, defaults to `lib'", assemExtdirsDefault).dependsOn(target, "msil")
- val sourcedir = StringSetting ("-Xsourcedir", "directory", "When -target:msil, the source folder structure is mirrored in output directory.", ".").dependsOn(target, "msil")
- val checkInit = BooleanSetting ("-Xcheckinit", "Add runtime checks on field accessors. Uninitialized accesses result in an exception being thrown.")
- val noassertions = BooleanSetting ("-Xdisable-assertions", "Generate no assertions and assumptions")
- val elideLevel = IntSetting ("-Xelide-level", "Generate calls to @elidable-marked methods only method priority is greater than argument.",
- elidable.ASSERTION, None, elidable.byName.get(_))
- val Xexperimental = BooleanSetting ("-Xexperimental", "Enable experimental extensions")
- val noForwarders = BooleanSetting ("-Xno-forwarders", "Do not generate static forwarders in mirror classes")
- val future = BooleanSetting ("-Xfuture", "Turn on future language features")
- val genPhaseGraph = StringSetting ("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot", "")
- val XlogImplicits = BooleanSetting ("-Xlog-implicits", "Show more info on why some implicits are not applicable")
- val nouescape = BooleanSetting ("-Xno-uescape", "Disables handling of \\u unicode escapes")
- val XnoVarargsConversion = BooleanSetting("-Xno-varargs-conversion", "disable varags conversion")
- val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing")
- val plugin = MultiStringSetting("-Xplugin", "file", "Load a plugin from a file")
- val disable = MultiStringSetting("-Xplugin-disable", "plugin", "Disable a plugin")
- val showPlugins = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins")
- val require = MultiStringSetting("-Xplugin-require", "plugin", "Abort unless a plugin is available")
- val pluginsDir = StringSetting ("-Xpluginsdir", "path", "Location to find compiler plugins", pluginsDirDefault)
- val print = PhasesSetting ("-Xprint", "Print out program after")
- val writeICode = BooleanSetting ("-Xprint-icode", "Log internal icode to *.icode files")
- val Xprintpos = BooleanSetting ("-Xprint-pos", "Print tree positions (as offsets)")
- val printtypes = BooleanSetting ("-Xprint-types", "Print tree types (debugging option)")
- val prompt = BooleanSetting ("-Xprompt", "Display a prompt after each error (debugging option)")
- val resident = BooleanSetting ("-Xresident", "Compiler stays resident, files to compile are read from standard input")
- val script = StringSetting ("-Xscript", "object", "Compile as a script, wrapping the code into object.main()", "")
- val Xshowcls = StringSetting ("-Xshow-class", "class", "Show class info", "")
- val Xshowobj = StringSetting ("-Xshow-object", "object", "Show object info", "")
- val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases")
- val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files", "scala.tools.nsc.io.SourceReader")
-
- /**
- * -Y "Private" settings
- */
- val Yhelp = BooleanSetting ("-Y", "Print a synopsis of private options")
- val browse = PhasesSetting ("-Ybrowse", "Browse the abstract syntax tree after")
- val check = PhasesSetting ("-Ycheck", "Check the tree at the end of")
- val Xcloselim = BooleanSetting ("-Yclosure-elim", "Perform closure elimination")
- val Xcodebase = StringSetting ("-Ycodebase", "codebase", "Specify the URL containing the Scala libraries", "")
- val noCompletion = BooleanSetting ("-Yno-completion", "Disable tab-completion in the REPL")
- val Xdce = BooleanSetting ("-Ydead-code", "Perform dead code elimination")
- val debug = BooleanSetting ("-Ydebug", "Output debugging messages")
- val Xdetach = BooleanSetting ("-Ydetach", "Perform detaching of remote closures")
- // val doc = BooleanSetting ("-Ydoc", "Generate documentation")
- val inline = BooleanSetting ("-Yinline", "Perform inlining when possible")
- val Xlinearizer = ChoiceSetting ("-Ylinearizer", "Linearizer to use", List("normal", "dfs", "rpo", "dump"), "rpo") .
- withHelpSyntax("-Ylinearizer:<which>")
- val log = PhasesSetting ("-Ylog", "Log operations in")
- val Ynogenericsig = BooleanSetting ("-Yno-generic-signatures", "Suppress generation of generic signatures for Java")
- val noimports = BooleanSetting ("-Yno-imports", "Compile without any implicit imports")
- val nopredefs = BooleanSetting ("-Yno-predefs", "Compile without any implicit predefined values")
- val Yrecursion = IntSetting ("-Yrecursion", "Recursion depth used when locking symbols", 0, Some(0, Int.MaxValue), _ => None)
- val selfInAnnots = BooleanSetting ("-Yself-in-annots", "Include a \"self\" identifier inside of annotations")
- val Xshowtrees = BooleanSetting ("-Yshow-trees", "Show detailed trees when used in connection with -print:phase")
- val skip = PhasesSetting ("-Yskip", "Skip")
- val Xsqueeze = ChoiceSetting ("-Ysqueeze", "if on, creates compact code in matching", List("on","off"), "on") .
- withHelpSyntax("-Ysqueeze:<enabled>")
- val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics")
- val stop = PhasesSetting ("-Ystop", "Stop after phase")
- val refinementMethodDispatch =
- ChoiceSetting ("-Ystruct-dispatch", "Selects dispatch method for structural refinement method calls",
- List("no-cache", "mono-cache", "poly-cache", "invoke-dynamic"), "poly-cache") .
- withHelpSyntax("-Ystruct-dispatch:<method>")
- val specialize = BooleanSetting ("-Yspecialize", "Specialize generic code on types.")
- val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
- val Yidedebug = BooleanSetting ("-Yide-debug", "Generate, validate and output trees using the interactive compiler.")
- val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "Compile using the specified build manager", List("none", "refined", "simple"), "none") .
- withHelpSyntax("-Ybuilder-debug:<method>")
- val Ytyperdebug = BooleanSetting ("-Ytyper-debug", "Trace all type assignements")
- val Ypmatdebug = BooleanSetting ("-Ypmat-debug", "Trace all pattern matcher activity.")
- val Ytailrec = BooleanSetting ("-Ytailrecommend", "Alert methods which would be tail-recursive if private or final.")
- val Yjenkins = BooleanSetting ("-Yjenkins-hashCodes", "Use jenkins hash algorithm for case class generated hashCodes.")
-
- // Warnings
- val Xwarninit = BooleanSetting ("-Xwarninit", "Warn about possible changes in initialization semantics")
- val Xchecknull = BooleanSetting ("-Xcheck-null", "Emit warning on selection of nullable reference")
- val Xwarndeadcode = BooleanSetting ("-Ywarn-dead-code", "Emit warnings for dead code")
- val YwarnShadow = BooleanSetting ("-Ywarn-shadowing", "Emit warnings about possible variable shadowing.")
- val YwarnCatches = BooleanSetting ("-Ywarn-catches", "Emit warnings about catch blocks which catch everything.")
- val Xwarnings = BooleanSetting ("-Xstrict-warnings", "Emit warnings about lots of things.") .
- withPostSetHook(() =>
- List(YwarnShadow, YwarnCatches, Xwarndeadcode, Xwarninit) foreach (_.value = true)
- )
- /**
- * "fsc-specific" settings.
- */
- val fscShutdown = BooleanSetting ("-shutdown", "Shutdown the fsc daemon")
-
- /**
- * -P "Plugin" settings
- */
- val pluginOptions = MultiStringSetting("-P", "plugin:opt", "Pass an option to a plugin") .
- withHelpSyntax("-P:<plugin>:<opt>")
}
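
With Settings reduced to a compatibility stub, the removed machinery (setting lookup, argument parsing, splitParams, and the Setting class hierarchy) now lives behind settings.MutableSettings. As a reminder of what splitParams does, here is a deliberately simplified sketch that splits a parameter line on whitespace while keeping double-quoted chunks together; unlike the real parser it only recognizes quotes around whole tokens, and the names are local to the example:

    object SplitParamsSketch {
      private val Token = """"([^"]*)"|(\S+)""".r

      // Either a quoted chunk (group 1) or a bare whitespace-free token (group 2).
      def splitParams(line: String): List[String] =
        Token.findAllMatchIn(line).map { m =>
          Option(m.group(1)) getOrElse m.group(2)
        }.toList

      def main(args: Array[String]): Unit =
        println(splitParams("""-d out -encoding "UTF 8" -verbose"""))
        // List(-d, out, -encoding, UTF 8, -verbose)
    }
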
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index a6792b3ba7..3144d8140d 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -29,7 +29,7 @@ trait DocComments { self: SymbolTable =>
/** The position of the raw doc comment of symbol `sym`, or NoPosition if missing
* If a symbol does not have a doc comment but some overridden version of it does,
- * the posititon of the doc comment of the overridden version is returned instead.
+ * the position of the doc comment of the overridden version is returned instead.
*/
def docCommentPos(sym: Symbol): Position =
getDocComment(sym) map (_.pos) getOrElse NoPosition
@@ -154,9 +154,9 @@ trait DocComments { self: SymbolTable =>
var tocopy = startTag(dst, dstSections dropWhile (!isMovable(dst, _)))
if (copyFirstPara) {
- val eop = // end of first para, which is delimited by blank line, or tag, or end of comment
- findNext(src, 0) (src.charAt(_) == '\n') min startTag(src, srcSections)
- out append src.substring(0, eop)
+ val eop = // end of comment body (first para), which is delimited by blank line, or tag, or end of comment
+ (findNext(src, 0)(src.charAt(_) == '\n')) min startTag(src, srcSections)
+ out append src.substring(0, eop).trim
copied = 3
tocopy = 3
}
@@ -167,16 +167,13 @@ trait DocComments { self: SymbolTable =>
case None =>
srcSec match {
case Some((start1, end1)) =>
- out append dst.substring(copied, tocopy)
+ out append dst.substring(copied, tocopy).trim
copied = tocopy
- out append src.substring(start1, end1)
+ out append src.substring(start1, end1).trim
case None =>
}
}
- def mergeParam(name: String, srcMap: Map[String, (Int, Int)], dstMap: Map[String, (Int, Int)]) =
- mergeSection(srcMap get name, dstMap get name)
-
for (params <- sym.paramss; param <- params)
mergeSection(srcParams get param.name.toString, dstParams get param.name.toString)
for (tparam <- sym.typeParams)
@@ -280,7 +277,7 @@ trait DocComments { self: SymbolTable =>
startsWithTag(raw, idx, "@define") || startsWithTag(raw, idx, "@usecase"))
val (defines, usecases) = sections partition (startsWithTag(raw, _, "@define"))
val end = startTag(raw, sections)
-/*
+ /*
println("processing doc comment:")
println(raw)
println("===========>")
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index 89f036f34b..ccab6423cf 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -80,7 +80,7 @@ abstract class NodePrinters {
if (sym hasFlag COVARIANT ) buf.append(" | COVARIANT")
if (sym hasFlag CAPTURED ) buf.append(" | CAPTURED")
if (sym hasFlag BYNAMEPARAM ) buf.append(" | BYNAMEPARAM")
- if (sym hasFlag CONTRAVARIANT) buf.append(" | CONTRVARIANT")
+ if (sym hasFlag CONTRAVARIANT) buf.append(" | CONTRAVARIANT")
if (sym hasFlag LABEL ) buf.append(" | LABEL")
if (sym hasFlag INCONSTRUCTOR) buf.append(" | INCONSTRUCTOR")
if (sym hasFlag ABSOVERRIDE ) buf.append(" | ABSOVERRIDE")
@@ -158,46 +158,27 @@ abstract class NodePrinters {
}
def nodeinfo2(tree: Tree): String =
(if (comma) "," else "") + nodeinfo(tree)
+
+ def applyCommon(name: String, tree: Tree, fun: Tree, args: List[Tree]) {
+ println(name + "(" + nodeinfo(tree))
+ traverse(fun, level + 1, true)
+ if (args.isEmpty)
+ println(" Nil // no argument")
+ else {
+ val n = args.length
+ println(" List( // " + n + " arguments(s)")
+ for (i <- 0 until n)
+ traverse(args(i), level + 2, i < n-1)
+ println(" )")
+ }
+ printcln(")")
+ }
+
tree match {
- case AppliedTypeTree(tpt, args) =>
- println("AppliedTypeTree(" + nodeinfo(tree))
- traverse(tpt, level + 1, true)
- if (args.isEmpty)
- println(" List() // no argument")
- else {
- val n = args.length
- println(" List( // " + n + " arguments(s)")
- for (i <- 0 until n)
- traverse(args(i), level + 2, i < n-1)
- println(" )")
- }
- printcln(")")
- case Apply(fun, args) =>
- println("Apply(" + nodeinfo(tree))
- traverse(fun, level + 1, true)
- if (args.isEmpty)
- println(" List() // no argument")
- else {
- val n = args.length
- println(" List( // " + n + " argument(s)")
- for (i <- 0 until n)
- traverse(args(i), level + 2, i < n-1)
- println(" )")
- }
- printcln(")")
- case ApplyDynamic(fun, args) =>
- println("ApplyDynamic(" + nodeinfo(tree))
- traverse(fun, level + 1, true)
- if (args.isEmpty)
- println(" List() // no argument")
- else {
- val n = args.length
- println(" List( // " + n + " argument(s)")
- for (i <- 0 until n)
- traverse(args(i), level + 2, i < n-1)
- println(" )")
- }
- printcln(")")
+ case AppliedTypeTree(tpt, args) => applyCommon("AppliedTypeTree", tree, tpt, args)
+ case Apply(fun, args) => applyCommon("Apply", tree, fun, args)
+ case ApplyDynamic(fun, args) => applyCommon("ApplyDynamic", tree, fun, args)
+
case Block(stats, expr) =>
println("Block(" + nodeinfo(tree))
if (stats.isEmpty)
@@ -355,16 +336,11 @@ abstract class NodePrinters {
def printUnit(unit: CompilationUnit) {
print("// Scala source: " + unit.source + "\n")
- if (unit.body ne null) {
- print(nodeToString(unit.body)); println()
- } else {
- print("<null>")
- }
- println()
+ println(Option(unit.body) map (x => nodeToString(x) + "\n") getOrElse "<null>")
}
def printAll() {
print("[[syntax trees at end of " + phase + "]]")
- for (unit <- global.currentRun.units) printUnit(unit)
+ global.currentRun.units foreach printUnit
}
}
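
Besides factoring the three Apply-like cases into applyCommon, printUnit now wraps the possibly-null tree in Option so the null check and the print collapse into one expression. A small sketch of that idiom, with CompUnit and nodeToString as illustrative stand-ins for the compiler's types:

    object NullSafePrint {
      final case class CompUnit(body: String)   // body may be null, as unit.body can be

      def nodeToString(body: String): String = "Tree(" + body + ")"

      // Option(x) turns a possibly-null reference into Some/None, replacing `if (x ne null) ... else ...`.
      def printUnit(unit: CompUnit): Unit =
        println(Option(unit.body) map (b => nodeToString(b) + "\n") getOrElse "<null>")

      def main(args: Array[String]): Unit = {
        printUnit(CompUnit("Apply(f, List(x))"))
        printUnit(CompUnit(null))
      }
    }
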
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index 42428c7d8f..eaad12c0ed 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -680,7 +680,7 @@ abstract class TreeBrowsers {
toDocument(thistpe) :/: ", " :/:
toDocument(supertpe) ::")"))
case _ =>
- throw new Error("Unknown case: " + t.toString() +", "+ t.getClass)
+ Predef.error("Unknown case: " + t.toString() +", "+ t.getClass)
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index a788015262..fd13958053 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -88,6 +88,8 @@ trait TreeDSL {
def ANY_>= (other: Tree) = fn(target, nme.GE, other)
def ANY_<= (other: Tree) = fn(target, nme.LE, other)
def OBJ_!= (other: Tree) = fn(target, Object_ne, other)
+ def OBJ_EQ (other: Tree) = fn(target, nme.eq, other)
+ def OBJ_NE (other: Tree) = fn(target, nme.ne, other)
def INT_| (other: Tree) = fn(target, getMember(IntClass, nme.OR), other)
def INT_& (other: Tree) = fn(target, getMember(IntClass, nme.AND), other)
@@ -187,10 +189,11 @@ trait TreeDSL {
}
/** Top level accessible. */
- def THROW(sym: Symbol, msg: Tree = null) = {
- val arg: List[Tree] = if (msg == null) Nil else List(msg.TOSTRING())
- Throw(New(TypeTree(sym.tpe), List(arg)))
- }
+ def MATCHERROR(arg: Tree) = Throw(New(TypeTree(MatchErrorClass.tpe), List(List(arg))))
+ /** !!! should generalize null guard from match error here. */
+ def THROW(sym: Symbol): Throw = Throw(New(TypeTree(sym.tpe), List(Nil)))
+ def THROW(sym: Symbol, msg: Tree): Throw = Throw(New(TypeTree(sym.tpe), List(List(msg.TOSTRING()))))
+
def NEW(tpe: Tree, args: Tree*) = New(tpe, List(args.toList))
def NEW(sym: Symbol, args: Tree*) =
if (args.isEmpty) New(TypeTree(sym.tpe))
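
The nullable default argument `msg: Tree = null` is replaced by two explicit THROW overloads plus a dedicated MATCHERROR helper, so callers never pass or test null. A small sketch of that refactoring outside the compiler, with a Thrown case class standing in for the built tree:

    object OverloadsOverNullDefaults {
      final case class Thrown(exceptionType: String, args: List[String])

      // Before: def THROW(tpe: String, msg: String = null) with an internal null check.
      // After: one overload per arity, no null anywhere.
      def THROW(tpe: String): Thrown              = Thrown(tpe, Nil)
      def THROW(tpe: String, msg: String): Thrown = Thrown(tpe, List(msg))

      def main(args: Array[String]): Unit = {
        println(THROW("IllegalStateException"))
        println(THROW("IllegalArgumentException", "bad input"))
      }
    }
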
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 7f2bcf99c5..9f2065f297 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -98,7 +98,7 @@ abstract class TreeGen
mkAttributedQualifier(firstStable.get)
case _ =>
- throw new Error("bad qualifier: " + tpe)
+ abort("bad qualifier: " + tpe)
}
/** Builds a reference to given symbol with given stable prefix. */
@@ -192,7 +192,7 @@ abstract class TreeGen
mkTypeApply(value, tpe, (if (any) Any_asInstanceOf else Object_asInstanceOf))
def mkClassOf(tp: Type): Tree =
- Literal(Constant(tp)) setType Predef_classOfType(tp)
+ Literal(Constant(tp)) setType ConstantType(Constant(tp)) // ClassType(tp)
def mkCheckInit(tree: Tree): Tree = {
val tpe =
@@ -331,6 +331,17 @@ abstract class TreeGen
Apply(meth, List(tree))
}
+ /** Try to convert Select(qual, name) to a SelectFromTypeTree.
+ */
+ def convertToSelectFromType(qual: Tree, name: Name): Tree = {
+ def selFromType(qual1: Tree) = SelectFromTypeTree(qual1 setPos qual.pos, name)
+ qual match {
+ case Select(qual1, name) => selFromType(Select(qual1, name.toTypeName))
+ case Ident(name) => selFromType(Ident(name.toTypeName))
+ case _ => EmptyTree
+ }
+ }
+
/** Used in situations where you need to access value of an expression several times
*/
def evalOnce(expr: Tree, owner: Symbol, unit: CompilationUnit)(within: (() => Tree) => Tree): Tree = {
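
convertToSelectFromType pattern matches on the shape of the qualifier and signals "not convertible" with the EmptyTree sentinel. A simplified sketch of that shape test on a tiny local Tree ADT; it ignores the term-name/type-name distinction the real version handles:

    object SelectFromTypeSketch {
      sealed trait Tree
      case object EmptyTree extends Tree
      final case class Ident(name: String) extends Tree
      final case class Select(qual: Tree, name: String) extends Tree
      final case class SelectFromTypeTree(qual: Tree, name: String) extends Tree

      // Only Select and Ident qualifiers can be converted; everything else yields the sentinel.
      def convertToSelectFromType(qual: Tree, name: String): Tree = qual match {
        case _: Select | _: Ident => SelectFromTypeTree(qual, name)
        case _                    => EmptyTree
      }

      def main(args: Array[String]): Unit = {
        println(convertToSelectFromType(Select(Ident("scala"), "Predef"), "String"))
        println(convertToSelectFromType(EmptyTree, "String"))
      }
    }
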
diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
index fd7227c371..2e543a0960 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
@@ -98,7 +98,7 @@ abstract class TreeInfo {
def mayBeVarGetter(sym: Symbol) = sym.info match {
case PolyType(List(), _) => sym.owner.isClass && !sym.isStable
- case _: ImplicitMethodType => sym.owner.isClass && !sym.isStable
+ case mt: MethodType => mt.isImplicit && sym.owner.isClass && !sym.isStable
case _ => false
}
@@ -350,4 +350,21 @@ abstract class TreeInfo {
case TypeDef(_, _, _, _) => !isAbsTypeDef(tree)
case _ => false
}
+
+ /** Some handy extractors for spotting true and false expressions
+ * through the haze of braces.
+ */
+ abstract class SeeThroughBlocks[T] {
+ protected def unapplyImpl(x: Tree): T
+ def unapply(x: Tree): T = x match {
+ case Block(Nil, expr) => unapply(expr)
+ case _ => unapplyImpl(x)
+ }
+ }
+ object IsTrue extends SeeThroughBlocks[Boolean] {
+ protected def unapplyImpl(x: Tree): Boolean = x equalsStructure Literal(Constant(true))
+ }
+ object IsFalse extends SeeThroughBlocks[Boolean] {
+ protected def unapplyImpl(x: Tree): Boolean = x equalsStructure Literal(Constant(false))
+ }
}
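
SeeThroughBlocks defines extractors that keep unwrapping empty blocks before testing, so `{ { true } }` still matches IsTrue. A self-contained sketch of the same trick on a tiny local Tree ADT (Lit and Block here are illustrative, not the compiler's trees):

    object SeeThroughBlocksSketch {
      sealed trait Tree
      final case class Lit(value: Boolean) extends Tree
      final case class Block(stats: List[Tree], expr: Tree) extends Tree

      // An extractor that peels off statement-free blocks before delegating to the real test.
      abstract class SeeThroughBlocks[T] {
        protected def unapplyImpl(x: Tree): T
        def unapply(x: Tree): T = x match {
          case Block(Nil, expr) => unapply(expr)
          case _                => unapplyImpl(x)
        }
      }
      object IsTrue extends SeeThroughBlocks[Boolean] {
        protected def unapplyImpl(x: Tree): Boolean = x == Lit(true)
      }

      def main(args: Array[String]): Unit = {
        val wrapped: Tree = Block(Nil, Block(Nil, Lit(true)))
        wrapped match {
          case IsTrue() => println("literally true, seen through the braces")
          case _        => println("something else")
        }
      }
    }
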
diff --git a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala b/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
index 4ba5f17a40..cb899a2560 100644
--- a/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreePrinters.scala
@@ -8,18 +8,17 @@ package scala.tools.nsc
package ast
import compat.Platform.{EOL => LINE_SEPARATOR}
-import java.io.{OutputStream, PrintWriter, Writer}
+import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
import symtab.Flags._
import symtab.SymbolTable
-abstract class TreePrinters {
+trait TreePrinters { trees: SymbolTable =>
- val trees: SymbolTable
- import trees._
+ import treeInfo.{ IsTrue, IsFalse }
final val showOuterTests = false
- class TreePrinter(out: PrintWriter) {
+ class TreePrinter(out: PrintWriter) extends trees.AbsTreePrinter(out) {
protected var indentMargin = 0
protected val indentStep = 2
protected var indentString = " " // 40
@@ -29,7 +28,10 @@ abstract class TreePrinters {
def indent = indentMargin += indentStep
def undent = indentMargin -= indentStep
- def println {
+ protected def doPrintPositions = settings.Xprintpos.value
+ def printPosition(tree: Tree) = if (doPrintPositions) print(tree.pos.show)
+
+ def println() {
out.println()
while (indentMargin > indentString.length())
indentString += indentString
@@ -46,8 +48,8 @@ abstract class TreePrinters {
}
def printColumn(ts: List[Tree], start: String, sep: String, end: String) {
- print(start); indent; println
- printSeq(ts){print}{print(sep); println}; undent; println; print(end)
+ print(start); indent; println()
+ printSeq(ts){print}{print(sep); println()}; undent; println(); print(end)
}
def printRow(ts: List[Tree], start: String, sep: String, end: String) {
@@ -75,11 +77,11 @@ abstract class TreePrinters {
def printParam(tree: Tree) {
tree match {
case ValDef(mods, name, tp, rhs) =>
- if (settings.Xprintpos.value) print(tree.pos.show)
+ printPosition(tree)
printAnnotations(tree)
print(symName(tree, name)); printOpt(": ", tp); printOpt(" = ", rhs)
case TypeDef(mods, name, tparams, rhs) =>
- if (settings.Xprintpos.value) print(tree.pos.show)
+ printPosition(tree)
print(symName(tree, name))
printTypeParams(tparams); print(rhs)
}
@@ -93,46 +95,47 @@ abstract class TreePrinters {
printColumn(List(tree), "{", ";", "}")
}
}
- def symName(tree: Tree, name: Name): String =
- if (tree.symbol != null && tree.symbol != NoSymbol) {
- ((if (tree.symbol.isMixinConstructor) "/*"+tree.symbol.owner.name+"*/" else "") +
- tree.symbol.nameString)
- } else name.toString();
+
+ private def symNameInternal(tree: Tree, name: Name, decoded: Boolean): String = {
+ val nameToString: Name => String = if (decoded) _.decode else _.toString
+
+ tree.symbol match {
+ case null | NoSymbol => nameToString(name)
+ case sym =>
+ val prefix = if (sym.isMixinConstructor) "/*%s*/".format(nameToString(sym.owner.name)) else ""
+ prefix + tree.symbol.nameString
+ }
+ }
+
+ def decodedSymName(tree: Tree, name: Name) = symNameInternal(tree, name, true)
+ def symName(tree: Tree, name: Name) = symNameInternal(tree, name, false)
def printOpt(prefix: String, tree: Tree) {
if (!tree.isEmpty) { print(prefix); print(tree) }
}
def printModifiers(tree: Tree, mods: Modifiers) {
- if (tree.symbol == NoSymbol)
- printFlags(mods.flags, mods.privateWithin.toString)
- else if (tree.symbol.privateWithin == NoSymbol ||
- tree.symbol.privateWithin == tree.symbol.owner)
- printFlags(tree.symbol.flags, "")
- else
- printFlags(tree.symbol.flags, tree.symbol.privateWithin.name.toString)
+ def pw = tree.symbol.privateWithin
+ val args =
+ if (tree.symbol == NoSymbol) (mods.flags, mods.privateWithin)
+ else if (pw == NoSymbol || pw == tree.symbol.owner) (tree.symbol.flags, "")
+ else (tree.symbol.flags, pw.name)
+
+ printFlags(args._1, args._2.toString)
}
def printFlags(flags: Long, privateWithin: String) {
var mask: Long = if (settings.debug.value) -1L else PrintableFlags
val s = flagsToString(flags & mask, privateWithin)
- if (s.length() != 0) print(s + " ")
+ if (s != "") print(s + " ")
}
def printAnnotations(tree: Tree) {
- if (!tree.symbol.rawAnnotations.isEmpty) {
- val annots = tree.symbol.annotations
- if (!annots.isEmpty) {
- annots foreach { annot => print("@"+annot+" ") }
- println
- }
- } else {
- val annots = tree.asInstanceOf[MemberDef].mods.annotations
- if (!annots.isEmpty) {
- annots foreach { annot => print("@"+annot+" ") }
- println
- }
- }
+ val annots =
+ if (tree.symbol.rawAnnotations.nonEmpty) tree.symbol.annotations
+ else tree.asInstanceOf[MemberDef].mods.annotations
+
+ annots foreach (annot => print("@"+annot+" "))
}
def print(str: String) { out.print(str) }
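
symNameInternal folds the two printing paths into one worker selected by a decoded flag. A rough sketch of that split, using scala.reflect.NameTransformer as a stand-in for Name.decode and leaving out the mixin-constructor prefix:

object SymNameSketch {
  import scala.reflect.NameTransformer

  // One private worker, two public faces: the same shape as symNameInternal.
  private def nameInternal(raw: String, decoded: Boolean): String =
    if (decoded) NameTransformer.decode(raw) else raw

  def symName(raw: String): String        = nameInternal(raw, decoded = false)
  def decodedSymName(raw: String): String = nameInternal(raw, decoded = true)

  // decodedSymName("$plus") == "+", symName("$plus") == "$plus"
}
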
@@ -165,7 +168,7 @@ abstract class TreePrinters {
case ValDef(mods, name, tp, rhs) =>
printAnnotations(tree)
printModifiers(tree, mods)
- print(if (mods.hasFlag(MUTABLE)) "var " else "val ")
+ print(if (mods.isVariable) "var " else "val ")
print(symName(tree, name))
printOpt(": ", tp)
if (!mods.hasFlag(DEFERRED)) {
@@ -215,7 +218,7 @@ abstract class TreePrinters {
}
case DocDef(comment, definition) =>
- print(comment.raw); println; print(definition)
+ print(comment.raw); println(); print(definition)
case Template(parents, self, body) =>
val currentOwner1 = currentOwner
@@ -281,10 +284,10 @@ abstract class TreePrinters {
print(lhs); print(" = "); print(rhs)
case If(cond, thenp, elsep) =>
- print("if ("); print(cond); print(")"); indent; println
+ print("if ("); print(cond); print(")"); indent; println()
print(thenp); undent
if (!elsep.isEmpty) {
- println; print("else"); indent; println; print(elsep); undent
+ println(); print("else"); indent; println(); print(elsep); undent
}
case Return(expr) =>
@@ -377,9 +380,6 @@ abstract class TreePrinters {
case SelectFromArray(qualifier, name, _) =>
print(qualifier); print(".<arr>"); print(symName(tree, name))
- case tree: StubTree =>
- print(tree.toString)
-
case tree =>
print("<unknown tree of class "+tree.getClass+">")
}
@@ -389,11 +389,11 @@ abstract class TreePrinters {
}
def print(tree: Tree) {
- if (settings.Xprintpos.value) print(tree.pos.show)
+ printPosition(tree)
printRaw(
if (tree.isDef && tree.symbol != NoSymbol && tree.symbol.isInitialized) {
tree match {
- case ClassDef(_, _, _, impl @ Template(ps, trees.emptyValDef, body))
+ case ClassDef(_, _, _, impl @ Template(ps, emptyValDef, body))
if (tree.symbol.thisSym != tree.symbol) =>
ClassDef(tree.symbol, Template(ps, ValDef(tree.symbol.thisSym), body))
case ClassDef(_, _, _, impl) => ClassDef(tree.symbol, impl)
@@ -409,19 +409,255 @@ abstract class TreePrinters {
def print(unit: CompilationUnit) {
print("// Scala source: " + unit.source + LINE_SEPARATOR)
if (unit.body ne null) {
- print(unit.body); println
+ print(unit.body); println()
} else {
print("<null>")
}
- println; flush
+ println(); flush
+ }
+ }
+
+ /** A tree printer which is stingier about vertical whitespace and unnecessary
+ * punctuation than the standard one.
+ */
+ class CompactTreePrinter(out: PrintWriter) extends TreePrinter(out) {
+ override def printRow(ts: List[Tree], start: String, sep: String, end: String) {
+ print(start)
+ printSeq(ts)(print)(print(sep))
+ print(end)
+ }
+
+ // drill down through Blocks and pull out the real statements.
+ def allStatements(t: Tree): List[Tree] = t match {
+ case Block(stmts, expr) => (stmts flatMap allStatements) ::: List(expr)
+ case _ => List(t)
+ }
+
+ def printLogicalOr(t1: (Tree, Boolean), t2: (Tree, Boolean)) =
+ printLogicalOp(t1, t2, "||")
+
+ def printLogicalAnd(t1: (Tree, Boolean), t2: (Tree, Boolean)) =
+ printLogicalOp(t1, t2, "&&")
+
+ def printLogicalOp(t1: (Tree, Boolean), t2: (Tree, Boolean), op: String) = {
+ def maybenot(tvalue: Boolean) = if (tvalue) "" else "!"
+
+ print("%s(" format maybenot(t1._2))
+ printRaw(t1._1)
+ print(") %s %s(".format(op, maybenot(t2._2)))
+ printRaw(t2._1)
+ print(")")
+ }
+
+ override def printRaw(tree: Tree): Unit = {
+ // routing supercalls through this for debugging ease
+ def s() = super.printRaw(tree)
+
+ tree match {
+ // labels used for jumps - does not map to valid scala code
+ case LabelDef(name, params, rhs) =>
+ print("labeldef %s(%s) = ".format(name, params mkString ","))
+ printRaw(rhs)
+
+ case Ident(name) =>
+ print(decodedSymName(tree, name))
+
+ // target.method(arg) ==> target method arg
+ case Apply(Select(target, method), List(arg)) =>
+ if (method.decode.toString == "||")
+ printLogicalOr(target -> true, arg -> true)
+ else if (method.decode.toString == "&&")
+ printLogicalAnd(target -> true, arg -> true)
+ else (target, arg) match {
+ case (_: Ident, _: Literal | _: Ident) =>
+ printRaw(target)
+ print(" ")
+ printRaw(Ident(method))
+ print(" ")
+ printRaw(arg)
+ case _ => s()
+ }
+
+ // target.unary_! ==> !target
+ case Select(qualifier, name) if (name.decode startsWith "unary_") =>
+ print(name.decode drop 6)
+ printRaw(qualifier)
+
+ case Select(qualifier, name) =>
+ printRaw(qualifier)
+ print(".")
+ print(name.decode)
+
+ // target.toString() ==> target.toString
+ case Apply(fn, Nil) => printRaw(fn)
+
+ // if a Block contains only one actual statement, just print it.
+ case Block(stats, expr) =>
+ allStatements(tree) match {
+ case List(x) => printRaw(x)
+ case xs => s()
+ }
+
+ // We get a lot of this stuff
+ case If( IsTrue(), x, _) => printRaw(x)
+ case If(IsFalse(), _, x) => printRaw(x)
+
+ case If(cond, IsTrue(), elsep) => printLogicalOr(cond -> true, elsep -> true)
+ case If(cond, IsFalse(), elsep) => printLogicalAnd(cond -> false, elsep -> true)
+ case If(cond, thenp, IsTrue()) => printLogicalOr(cond -> false, thenp -> true)
+ case If(cond, thenp, IsFalse()) => printLogicalAnd(cond -> true, thenp -> true)
+
+ // If thenp or elsep has only one statement, it doesn't need more than one line.
+ case If(cond, thenp, elsep) =>
+ def ifIndented(x: Tree) = {
+ indent ; println() ; printRaw(x) ; undent
+ }
+
+ val List(thenStmts, elseStmts) = List(thenp, elsep) map allStatements
+ print("if ("); print(cond); print(")")
+
+ thenStmts match {
+ case List(x: If) => ifIndented(x)
+ case List(x) => printRaw(x)
+ case _ => printRaw(thenp)
+ }
+
+ if (elseStmts.nonEmpty) {
+ print("else")
+ indent ; println()
+ elseStmts match {
+ case List(x) => printRaw(x)
+ case _ => printRaw(elsep)
+ }
+ undent ; println()
+ }
+ case _ => s()
+ }
+ }
+ }
+
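
The If cases above are the heart of CompactTreePrinter: boolean-shaped conditionals are flattened into && and || text. A standalone sketch of that rewriting over a toy tree (illustrative only, not the printer's real output path):

object CompactIfSketch {
  sealed trait Tree
  case class Literal(value: Boolean)                  extends Tree
  case class Ident(name: String)                      extends Tree
  case class If(cond: Tree, thenp: Tree, elsep: Tree) extends Tree

  // The same boolean algebra the compact printer applies to If nodes, e.g.
  //   if (c) true else e   ==>  (c) || (e)
  //   if (c) t else false  ==>  (c) && (t)
  def render(t: Tree): String = t match {
    case Literal(v)                => v.toString
    case Ident(n)                  => n
    case If(c, Literal(true), e)   => "(%s) || (%s)".format(render(c), render(e))
    case If(c, Literal(false), e)  => "!(%s) && (%s)".format(render(c), render(e))
    case If(c, t1, Literal(false)) => "(%s) && (%s)".format(render(c), render(t1))
    case If(c, t1, Literal(true))  => "!(%s) || (%s)".format(render(c), render(t1))
    case If(c, t1, e)              => "if (%s) %s else %s".format(render(c), render(t1), render(e))
  }
}
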
+ /** This must guarantee not to force any evaluation, so we can learn
+ * a little bit about trees in the midst of compilation without altering
+ * the natural course of events.
+ */
+ class SafeTreePrinter(out: PrintWriter) extends TreePrinter(out) {
+ override def print(tree: Tree) {
+ printPosition(tree)
+ printRaw(tree)
+ }
+ private def default(t: Tree) = t.getClass.getName.reverse.takeWhile(_ != '.').reverse
+ private def params(trees: List[Tree]): String = trees map safe mkString ", "
+
+ private def safe(name: Name): String = name.decode
+ private def safe(tree: Tree): String = tree match {
+ case Apply(fn, args) => "%s(%s)".format(safe(fn), params(args))
+ case Select(qual, name) => safe(qual) + "." + safe(name)
+ case This(qual) => safe(qual) + ".this"
+ case Ident(name) => safe(name)
+ case Literal(value) => value.stringValue
+ case _ => "(?: %s)".format(default(tree))
+ }
+
+ override def printRaw(tree: Tree) { print(safe(tree)) }
+ }
+
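
SafeTreePrinter renders through a total safe function that never forces symbol or type information and falls back to the node's class name. A simplified, self-contained sketch of that fallback strategy:

object SafeRenderSketch {
  sealed trait Tree
  case class Ident(name: String)                extends Tree
  case class Select(qual: Tree, name: String)   extends Tree
  case class Apply(fun: Tree, args: List[Tree]) extends Tree
  case class Lazy(force: () => Tree)            extends Tree   // stands in for "anything we must not evaluate"

  // Total, side-effect-free rendering: nodes we do not understand fall back to
  // their class name instead of being forced.
  def safe(t: Tree): String = t match {
    case Ident(n)       => n
    case Select(q, n)   => safe(q) + "." + n
    case Apply(f, args) => safe(f) + args.map(safe).mkString("(", ", ", ")")
    case other          => "(?: %s)".format(other.getClass.getName.reverse.takeWhile(_ != '.').reverse)
  }
}
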
+ class TreeMatchTemplate {
+ // non-trees defined in Trees
+ //
+ // case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int)
+ // case class Modifiers(flags: Long, privateWithin: Name, annotations: List[Tree], positions: Map[Long, Position])
+ //
+ def apply(t: Tree): Unit = t match {
+ // eliminated by typer
+ case Annotated(annot, arg) =>
+ case AssignOrNamedArg(lhs, rhs) =>
+ case DocDef(comment, definition) =>
+ case Import(expr, selectors) =>
+
+ // eliminated by refchecks
+ case ModuleDef(mods, name, impl) =>
+
+ // eliminated by erasure
+ case TypeDef(mods, name, tparams, rhs) =>
+ case Typed(expr, tpt) =>
+
+ // eliminated by cleanup
+ case ApplyDynamic(qual, args) =>
+
+ // eliminated by explicitouter
+ case Alternative(trees) =>
+ case Bind(name, body) =>
+ case CaseDef(pat, guard, body) =>
+ case Star(elem) =>
+ case UnApply(fun, args) =>
+
+ // eliminated by lambdalift
+ case Function(vparams, body) =>
+
+ // eliminated by uncurry
+ case AppliedTypeTree(tpt, args) =>
+ case CompoundTypeTree(templ) =>
+ case ExistentialTypeTree(tpt, whereClauses) =>
+ case SelectFromTypeTree(qual, selector) =>
+ case SingletonTypeTree(ref) =>
+ case TypeBoundsTree(lo, hi) =>
+
+ // survivors
+ case Apply(fun, args) =>
+ case ArrayValue(elemtpt, trees) =>
+ case Assign(lhs, rhs) =>
+ case Block(stats, expr) =>
+ case ClassDef(mods, name, tparams, impl) =>
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ case EmptyTree =>
+ case Ident(name) =>
+ case If(cond, thenp, elsep) =>
+ case LabelDef(name, params, rhs) =>
+ case Literal(value) =>
+ case Match(selector, cases) =>
+ case New(tpt) =>
+ case PackageDef(pid, stats) =>
+ case Return(expr) =>
+ case Select(qualifier, selector) =>
+ case Super(qual, mix) =>
+ case Template(parents, self, body) =>
+ case This(qual) =>
+ case Throw(expr) =>
+ case Try(block, catches, finalizer) =>
+ case TypeApply(fun, args) =>
+ case TypeTree() =>
+ case ValDef(mods, name, tpt, rhs) =>
+
+ // missing from the Trees comment
+ case Parens(args) => // only used during parsing
+ case SelectFromArray(qual, name, erasure) => // only used during erasure
}
}
- def create(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
- def create(stream: OutputStream): TreePrinter = create(new PrintWriter(stream))
- def create(): TreePrinter = {
- create(new PrintWriter(ConsoleWriter))
+ private def asStringInternal(t: Tree, f: PrintWriter => TreePrinter): String = {
+ val buffer = new StringWriter()
+ val printer = f(new PrintWriter(buffer))
+ printer.print(t)
+ printer.flush()
+ buffer.toString
}
+ def asString(t: Tree): String = asStringInternal(t, newStandardTreePrinter)
+ def asCompactString(t: Tree): String = asStringInternal(t, newCompactTreePrinter)
+
+ def newStandardTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
+ def newStandardTreePrinter(stream: OutputStream): TreePrinter = newStandardTreePrinter(new PrintWriter(stream))
+ def newStandardTreePrinter(): TreePrinter = newStandardTreePrinter(new PrintWriter(ConsoleWriter))
+
+ def newCompactTreePrinter(writer: PrintWriter): CompactTreePrinter = new CompactTreePrinter(writer)
+ def newCompactTreePrinter(stream: OutputStream): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(stream))
+ def newCompactTreePrinter(): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(ConsoleWriter))
+
+ def newTreePrinter(writer: PrintWriter): TreePrinter =
+ if (settings.Ycompacttrees.value) newCompactTreePrinter(writer)
+ else newStandardTreePrinter(writer)
+ def newTreePrinter(stream: OutputStream): TreePrinter = newTreePrinter(new PrintWriter(stream))
+ def newTreePrinter(): TreePrinter = newTreePrinter(new PrintWriter(ConsoleWriter))
+
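
The factory methods above choose a printer once, based on -Ycompacttrees, and asStringInternal threads whichever constructor it is given through a StringWriter. A sketch of the same wiring with placeholder printer classes (the compact flag stands in for the settings object):

import java.io.{ PrintWriter, StringWriter }

object PrinterFactorySketch {
  class Printer(out: PrintWriter) {
    def print(x: Any): Unit = out.print(x)
    def flush(): Unit = out.flush()
  }
  class CompactPrinter(out: PrintWriter) extends Printer(out)

  // One switch, consulted once, decides which printer every caller gets.
  def newPrinter(out: PrintWriter, compact: Boolean): Printer =
    if (compact) new CompactPrinter(out) else new Printer(out)

  // Same shape as asStringInternal: parameterise on the printer constructor.
  private def asStringWith(x: Any)(f: PrintWriter => Printer): String = {
    val buffer  = new StringWriter()
    val printer = f(new PrintWriter(buffer))
    printer.print(x)
    printer.flush()
    buffer.toString
  }

  def asString(x: Any): String        = asStringWith(x)(new Printer(_))
  def asCompactString(x: Any): String = asStringWith(x)(new CompactPrinter(_))
}
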
/** A writer that writes to the current Console and
* is sensitive to replacement of the Console's
* output stream.
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index c4c9af5294..9097dd460e 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -7,20 +7,12 @@
package scala.tools.nsc
package ast
-import java.io.{PrintWriter, StringWriter}
-
import scala.collection.mutable.ListBuffer
-import scala.tools.nsc.symtab.{Flags, SymbolTable}
+import scala.tools.nsc.symtab.SymbolTable
import scala.tools.nsc.symtab.Flags._
-import scala.tools.nsc.util.{FreshNameCreator, HashSet, Position, NoPosition, SourceFile}
-
-
-trait Trees {
- self: SymbolTable =>
+import scala.tools.nsc.util.{FreshNameCreator, HashSet, SourceFile}
- //statistics
-
- var nodeCount = 0
+trait Trees extends reflect.generic.Trees { self: SymbolTable =>
trait CompilationUnitTrait {
var body: Tree
@@ -32,11 +24,7 @@ trait Trees {
// sub-components --------------------------------------------------
- object treePrinters extends {
- val trees: Trees.this.type = Trees.this
- } with TreePrinters
-
- lazy val treePrinter = treePrinters.create()
+ lazy val treePrinter = newTreePrinter()
object treeInfo extends {
val trees: Trees.this.type = Trees.this
@@ -44,131 +32,39 @@ trait Trees {
val treeCopy = new LazyTreeCopier()
- // modifiers --------------------------------------------------------
-
- /** @param privateWithin the qualifier for a private (a type name)
- * or nme.EMPTY.toTypeName, if none is given.
- * @param annotations the annotations for the definition.
- * <strong>Note:</strong> the typechecker drops these annotations,
- * use the AnnotationInfo's (Symbol.annotations) in later phases.
- */
- case class Modifiers(flags: Long, privateWithin: Name, annotations: List[Tree], positions: Map[Long, Position]) {
- def isCovariant = hasFlag(COVARIANT ) // marked with `+'
- def isContravariant = hasFlag(CONTRAVARIANT) // marked with `-'
- def isPrivate = hasFlag(PRIVATE )
- def isProtected = hasFlag(PROTECTED)
- def isVariable = hasFlag(MUTABLE )
- def isArgument = hasFlag(PARAM )
- def isAccessor = hasFlag(ACCESSOR )
- def isOverride = hasFlag(OVERRIDE )
- def isAbstract = hasFlag(ABSTRACT )
- def isDeferred = hasFlag(DEFERRED )
- def isCase = hasFlag(CASE )
- def isLazy = hasFlag(LAZY )
- def isSealed = hasFlag(SEALED )
- def isFinal = hasFlag(FINAL )
- def isTrait = hasFlag(TRAIT | notDEFERRED) // (part of DEVIRTUALIZE)
- def isImplicit = hasFlag(IMPLICIT )
- def isPublic = !isPrivate && !isProtected
- def hasFlag(flag: Long) = (flag & flags) != 0L
- def & (flag: Long): Modifiers = {
- val flags1 = flags & flag
- if (flags1 == flags) this
- else Modifiers(flags1, privateWithin, annotations, positions)
- }
- def &~ (flag: Long): Modifiers = {
- val flags1 = flags & (~flag)
- if (flags1 == flags) this
- else Modifiers(flags1, privateWithin, annotations, positions)
- }
- def | (flag: Long): Modifiers = {
- val flags1 = flags | flag
- if (flags1 == flags) this
- else Modifiers(flags1, privateWithin, annotations, positions)
- }
- def withAnnotations(annots: List[Tree]) =
- if (annots.isEmpty) this
- else Modifiers(flags, privateWithin, annotations ::: annots, positions)
- def withPosition(flag: Long, position: Position) =
- Modifiers(flags, privateWithin, annotations, positions + (flag -> position))
- }
-
- def Modifiers(flags: Long, privateWithin: Name): Modifiers = Modifiers(flags, privateWithin, List(), new Map.EmptyMap)
- def Modifiers(flags: Long): Modifiers = Modifiers(flags, nme.EMPTY.toTypeName)
-
- val NoMods = Modifiers(0)
-
- // @M helper method for asserts that check consistency in kinding
- //def kindingIrrelevant(tp: Type) = (tp eq null) || phase.name == "erasure" || phase.erasedTypes
-
- abstract class Tree extends Product {
- if (util.Statistics.enabled) {
- util.Statistics.nodeByType(getClass) += 1
- }
-
- val id = nodeCount
-// assert(id != 1223)
- nodeCount += 1
-
- private var rawpos: Position = NoPosition
-
- def pos = rawpos
-
- private[this] var rawtpe: Type = _
-
- def tpe = rawtpe
- def tpe_=(t: Type) = rawtpe = t
+ implicit def treeWrapper(tree: Tree): TreeOps = new TreeOps(tree)
- def setPos(pos: Position): this.type = {
- rawpos = pos
-/*
- for (c <- this.children)
- if (c.pos.isOpaqueRange && !pos.includes(c.pos)) {
- assert(false, "non-enclosing positions in "+this)
- }
-*/
- this
- }
+ class TreeOps(tree: Tree) {
- /** Set tpe to give `tp` and return this.
- */
- def setType(tp: Type): this.type = {
- /*assert(kindingIrrelevant(tp) || !kindStar || !tp.isHigherKinded,
- tp+" should not be higher-kinded"); */
- tpe = tp
- this
+ def isTerm: Boolean = tree match {
+ case _: TermTree => true
+ case Bind(name, _) => name.isTermName
+ case Select(_, name) => name.isTermName
+ case Ident(name) => name.isTermName
+ case Annotated(_, arg) => arg.isTerm
+ case DocDef(_, defn) => defn.isTerm
+ case _ => false
}
- /** Like `setType`, but if this is a previously empty TypeTree
- * that fact is remembered so that resetType will snap back.
- */
- def defineType(tp: Type): this.type = setType(tp)
-
- /** Reset type to `null`, with special handling of TypeTrees and the EmptyType
- */
- def resetType() { tpe = null }
-
- def symbol: Symbol = null
- def symbol_=(sym: Symbol) {
- throw new Error("symbol_= inapplicable for " + this)
+ def isType: Boolean = tree match {
+ case _: TypTree => true
+ case Bind(name, _) => name.isTypeName
+ case Select(_, name) => name.isTypeName
+ case Ident(name) => name.isTypeName
+ case Annotated(_, arg) => arg.isType
+ case DocDef(_, defn) => defn.isType
+ case _ => false
}
- def setSymbol(sym: Symbol): this.type = { symbol = sym; this }
-
- def hasSymbol = false
- def isDef = false
- def isTerm = false
- def isType = false
- def isEmpty = false
- def isErroneous = (tpe ne null) && tpe.isErroneous
+ def isErroneous = (tree.tpe ne null) && tree.tpe.isErroneous
/** Apply `f' to each subtree */
- def foreach(f: Tree => Unit) { new ForeachTreeTraverser(f).traverse(this) }
+ def foreach(f: Tree => Unit) { new ForeachTreeTraverser(f).traverse(tree) }
/** Find all subtrees matching predicate `p' */
def filter(f: Tree => Boolean): List[Tree] = {
val ft = new FilterTreeTraverser(f)
- ft.traverse(this)
+ ft.traverse(tree)
ft.hits.toList
}
@@ -177,205 +73,41 @@ trait Trees {
*/
def find(p: Tree => Boolean): Option[Tree] = {
val ft = new FindTreeTraverser(p)
- ft.traverse(this)
+ ft.traverse(tree)
ft.result
}
/** Is there part of this tree which satisfies predicate `p'? */
def exists(p: Tree => Boolean): Boolean = !find(p).isEmpty
- /** The direct child trees of this tree
- * EmptyTrees are always omitted. Lists are collapsed.
- */
- def children: List[Tree] = {
- def subtrees(x: Any): List[Tree] = x match {
- case EmptyTree => List()
- case t: Tree => List(t)
- case xs: List[_] => xs flatMap subtrees
- case _ => List()
- }
- productIterator.toList flatMap subtrees
- }
-
- override def toString(): String = {
- val buffer = new StringWriter()
- val printer = treePrinters.create(new PrintWriter(buffer))
- printer.print(this)
- printer.flush()
- buffer.toString
- }
-
- override def hashCode(): Int = super.hashCode()
-
- override def equals(that: Any): Boolean = that match {
- case t: Tree => this eq t
- case _ => false
- }
- def hashCodeStructure: Int = {
- var hc = getClass.hashCode
- def f(what : Any) : Unit = what match {
- case what : Tree => hc += what.hashCodeStructure
- case what : Iterable[_] => what.foreach(f)
- case what : Product => g(what)
- case null =>
- case what => hc += what.hashCode
- }
- def g(what: Product) {
- hc += what.productArity
- var i = 0
- while (i < what.productArity) {
- f(what.productElement(i))
- i += 1
+ def equalsStructure(that : Tree) = equalsStructure0(that)(_ eq _)
+ def equalsStructure0(that: Tree)(f: (Tree,Tree) => Boolean): Boolean =
+ (tree == that) || ((tree.getClass == that.getClass) && { // XXX defining any kind of equality in terms of getClass is a mistake
+ assert(tree.productArity == that.productArity)
+ def equals0(this0: Any, that0: Any): Boolean = (this0, that0) match {
+ case (x: Tree, y: Tree) => f(x, y) || (x equalsStructure0 y)(f)
+ case (xs: List[_], ys: List[_]) => (xs corresponds ys)(equals0)
+ case _ => this0 == that0
+ }
+ def compareOriginals() = (this, that) match {
+ case (x: TypeTree, y: TypeTree) if x.original != null && y.original != null =>
+ (x.original equalsStructure0 y.original)(f)
+ case _ =>
+ true
}
- }
- g(this)
- hc
- }
- def equalsStructure(that : Tree) = equalsStructure0(that){case (t0,t1) => false}
- def equalsStructure0(that: Tree)(f : (Tree,Tree) => Boolean): Boolean = {
- if (this == that) return true
- if (this.getClass != that.getClass) return false
- val this0 = this.asInstanceOf[Product]
- val that0 = that.asInstanceOf[Product]
- assert(this0.productArity == that0.productArity)
- def equals0(thiz: Any, that: Any): Boolean = thiz match {
- case thiz: Tree =>
- f(thiz,that.asInstanceOf[Tree]) || thiz.equalsStructure0(that.asInstanceOf[Tree])(f)
- case thiz: List[_] =>
- val that0 = that.asInstanceOf[List[Any]]
- if (thiz.length != that0.length) false
- else {
- val results0 = for (i <- 0.until(thiz.length).toList)
- yield equals0(thiz(i), that0(i))
- results0.foldLeft(true)((x,y) => x && y)
- }
- case thiz =>
- thiz == that
- }
- val results = for (i <- 0.until(this0.productArity).toList) yield
- equals0(this0.productElement(i), that0.productElement(i))
- val b = results.foldLeft(true)((x,y) => x && y)
- if (b) (this,that) match {
- case (this0 : TypeTree, that0 : TypeTree) if this0.original != null && that0.original != null =>
- this0.original.equalsStructure0(that0.original)(f)
- case _ => true
- } else false
- }
-
- /** Make a copy of this tree, keeping all attributes,
- * except that all positions are focussed (so nothing
- * in this tree will be found when searching by position).
- */
- def duplicate: this.type =
- (duplicator transform this).asInstanceOf[this.type]
-
- def shallowDuplicate: this.type =
- ((new ShallowDuplicator(this)) transform this).asInstanceOf[this.type]
-
- def copyAttrs(tree: Tree): this.type = {
- rawpos = tree.rawpos
- tpe = tree.tpe
- if (hasSymbol) symbol = tree.symbol
- this
- }
- }
-
- trait SymTree extends Tree {
- override def hasSymbol = true
- override var symbol: Symbol = NoSymbol
- }
-
- trait RefTree extends SymTree {
- def name: Name
- }
-
- abstract class DefTree extends SymTree {
- def name: Name
- override def isDef = true
- }
-
- trait TermTree extends Tree {
- override def isTerm = true
- }
-
- /** A tree for a type. Note that not all type trees implement
- * this trait; in particular, Ident's are an exception. */
- trait TypTree extends Tree {
- override def isType = true
- }
-
-// ----- auxiliary objects and methods ------------------------------
-
- private lazy val duplicator = new Transformer {
- override val treeCopy = new StrictTreeCopier
- override def transform(t: Tree) = {
- val t1 = super.transform(t)
- if ((t1 ne t) && t1.pos.isRange) t1 setPos t.pos.focus
- t1
- }
- }
-
- private class ShallowDuplicator(orig: Tree) extends Transformer {
- override val treeCopy = new StrictTreeCopier
- override def transform(tree: Tree) =
- if (tree eq orig)
- super.transform(tree)
- else
- tree
- }
-
-// def nextPhase = if (phase.id > globalPhase.id) phase else phase.next;
-
-// ----- tree node alternatives --------------------------------------
-
- /** The empty tree */
- case object EmptyTree extends TermTree {
- super.tpe_=(NoType)
- override def tpe_=(t: Type) =
- if (t != NoType) throw new Error("tpe_=("+t+") inapplicable for <empty>")
- override def resetType() {}
- override def isEmpty = true
- }
- abstract class MemberDef extends DefTree {
- def mods: Modifiers
- def keyword: String = this match {
- case TypeDef(_, _, _, _) => "type"
- case ClassDef(mods, _, _, _) => if (mods.isTrait) "trait" else "class"
- case DefDef(_, _, _, _, _, _) => "def"
- case ModuleDef(_, _, _) => "object"
- case PackageDef(_, _) => "package"
- case ValDef(mods, _, _, _) => if (mods.isVariable) "var" else "val"
- case _ => ""
- }
- final def hasFlag(mask: Long): Boolean = (mods.flags & mask) != 0L
- }
+ (tree.productIterator.toList corresponds that.productIterator.toList)(equals0) && compareOriginals()
+ })
- /** Package clause
- */
- case class PackageDef(pid: RefTree, stats: List[Tree])
- extends MemberDef {
- def name = pid.name
- def mods = NoMods
+ def shallowDuplicate: Tree = new ShallowDuplicator(tree) transform tree
}
-/* disabled, as this is now dangerous
- def PackageDef(sym: Symbol, stats: List[Tree]): PackageDef =
- PackageDef(Ident(sym.name), stats) setSymbol sym
-*/
+ private[scala] override def duplicateTree(tree: Tree): Tree = duplicator transform tree
- abstract class ImplDef extends MemberDef {
- def impl: Template
- }
-
- /** Class definition */
- case class ClassDef(mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template)
- extends ImplDef
+// ---- values and creators ---------------------------------------
- /**
- * @param sym the class symbol
- * @param impl ...
- * @return ...
+ /** @param sym the class symbol
+ * @return the implementation template
*/
def ClassDef(sym: Symbol, impl: Template): ClassDef =
atPos(sym.pos) {
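
The hunk above turns isTerm, isType and structural equality into a TreeOps enrichment reached through an implicit conversion, so existing tree.isTerm call sites keep compiling unchanged. A minimal sketch of that enrichment pattern over a toy Tree:

object TreeOpsSketch {
  import scala.language.implicitConversions

  sealed trait Tree
  case class Ident(name: String)     extends Tree
  case class TypeIdent(name: String) extends Tree

  class TreeOps(tree: Tree) {
    def isTerm: Boolean = tree match {
      case Ident(_) => true
      case _        => false
    }
    def isType: Boolean = !isTerm
  }
  // The implicit conversion is what keeps existing `tree.isTerm` call sites compiling.
  implicit def treeWrapper(tree: Tree): TreeOps = new TreeOps(tree)

  val stillWorks: Boolean = Ident("x").isTerm   // resolves through treeWrapper
}
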
@@ -395,7 +127,6 @@ trait Trees {
* @param argss the supercall arguments
* @param body the template statements without primary constructor
* and value parameter fields.
- * @return ...
*/
def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): ClassDef =
ClassDef(sym,
@@ -403,45 +134,15 @@ trait Trees {
if (sym.thisSym == sym || phase.erasedTypes) emptyValDef else ValDef(sym.thisSym),
constrMods, vparamss, argss, body, superPos))
- /** Singleton object definition
- *
- * @param mods
- * @param name
- * @param impl
- */
- case class ModuleDef(mods: Modifiers, name: Name, impl: Template)
- extends ImplDef
-
/**
* @param sym the class symbol
- * @param impl ...
- * @return ...
+ * @param impl the implementation template
*/
def ModuleDef(sym: Symbol, impl: Template): ModuleDef =
atPos(sym.pos) {
ModuleDef(Modifiers(sym.flags), sym.name, impl) setSymbol sym
}
- abstract class ValOrDefDef extends MemberDef {
- def tpt: Tree
- def rhs: Tree
- }
-
- /** Value definition
- *
- * @param mods
- * @param name
- * @param tpt
- * @param rhs
- */
- case class ValDef(mods: Modifiers, name: Name, tpt: Tree, rhs: Tree)
- extends ValOrDefDef {
- assert(tpt.isType, tpt)
- //assert(kindingIrrelevant(tpt.tpe) || !tpt.tpe.isHigherKinded, tpt.tpe) //@M a value definition should never be typed with a higher-kinded type (values must be classified by types with kind *)
- //tpt.kindStar=true //@M turn on consistency checking in Tree
- assert(rhs.isTerm, rhs)
- }
-
def ValDef(sym: Symbol, rhs: Tree): ValDef =
atPos(sym.pos) {
ValDef(Modifiers(sym.flags), sym.name,
@@ -451,31 +152,12 @@ trait Trees {
def ValDef(sym: Symbol): ValDef = ValDef(sym, EmptyTree)
- object emptyValDef
- extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) {
+ object emptyValDef extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) {
override def isEmpty = true
super.setPos(NoPosition)
override def setPos(pos: Position) = { assert(false); this }
}
- /** Method definition
- *
- * @param mods
- * @param name
- * @param tparams
- * @param vparamss
- * @param tpt
- * @param rhs
- */
- case class DefDef(mods: Modifiers, name: Name, tparams: List[TypeDef],
- vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree)
- extends ValOrDefDef {
- assert(tpt.isType, tpt)
- //assert(kindingIrrelevant(tpt.tpe) || !tpt.tpe.isHigherKinded, tpt.tpe) //@M a method definition should never be typed with a higher-kinded type (values must be classified by types with kind *)
- //tpt.kindStar=true //@M turn on consistency checking in Tree
- assert(rhs.isTerm, rhs)
- }
-
def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef =
atPos(sym.pos) {
assert(sym != NoSymbol)
@@ -500,10 +182,6 @@ trait Trees {
DefDef(sym, rhs(sym.info.paramss))
}
- /** Abstract type, type parameter, or type alias */
- case class TypeDef(mods: Modifiers, name: Name, tparams: List[TypeDef], rhs: Tree)
- extends MemberDef
-
/** A TypeDef node which defines the given `sym' with the given right hand side `rhs'. */
def TypeDef(sym: Symbol, rhs: Tree): TypeDef =
atPos(sym.pos) {
@@ -514,91 +192,11 @@ trait Trees {
def TypeDef(sym: Symbol): TypeDef =
TypeDef(sym, TypeBoundsTree(TypeTree(sym.info.bounds.lo), TypeTree(sym.info.bounds.hi)))
- /** <p>
- * Labelled expression - the symbols in the array (must be Idents!)
- * are those the label takes as argument
- * </p>
- * <p>
- * The symbol that is given to the labeldef should have a MethodType
- * (as if it were a nested function)
- * </p>
- * <p>
- * Jumps are apply nodes attributed with label symbol, the arguments
- * will get assigned to the idents.
- * </p>
- * <p>
- * Note: on 2005-06-09 Martin, Iuli, Burak agreed to have forward
- * jumps within a Block.
- * </p>
- */
- case class LabelDef(name: Name, params: List[Ident], rhs: Tree)
- extends DefTree with TermTree {
- assert(rhs.isTerm)
- }
-
- /**
- * @param sym the class symbol
- * @param params ...
- * @param rhs ...
- * @return ...
- */
def LabelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef =
atPos(sym.pos) {
LabelDef(sym.name, params map Ident, rhs) setSymbol sym
}
- /** Import selector
- *
- * Representation of an imported name its optional rename and their optional positions
- *
- * @param name the imported name
- * @param namePos its position or -1 if undefined
- * @param rename the name the import is renamed to (== name if no renaming)
- * @param renamePos the position of the rename or -1 if undefined
- */
- case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int)
-
- /** Import clause
- *
- * @param expr
- * @param selectors
- */
- case class Import(expr: Tree, selectors: List[ImportSelector])
- extends SymTree
- // The symbol of an Import is an import symbol @see Symbol.newImport
- // It's used primarily as a marker to check that the import has been typechecked.
-
- /** Documented definition, eliminated by analyzer */
- case class DocDef(comment: DocComment, definition: Tree)
- extends Tree {
- override def symbol: Symbol = definition.symbol
- override def symbol_=(sym: Symbol) { definition.symbol = sym }
- // sean: seems to be important to the IDE
- override def isDef = definition.isDef
- override def isTerm = definition.isTerm
- override def isType = definition.isType
- }
-
- /** Instantiation template of a class or trait
- *
- * @param parents
- * @param body
- */
- case class Template(parents: List[Tree], self: ValDef, body: List[Tree])
- extends SymTree {
- // the symbol of a template is a local dummy. @see Symbol.newLocalDummy
- // the owner of the local dummy is the enclosing trait or class.
- // the local dummy is itself the owner of any local blocks
- // For example:
- //
- // class C {
- // def foo // owner is C
- // {
- // def bar // owner is local dummy
- // }
- // System.err.println("TEMPLATE: " + parents)
- }
-
/** Generates a template with constructor corresponding to
*
* constrmods (vparams1_) ... (vparams_n) preSuper { presupers }
@@ -625,7 +223,7 @@ trait Trees {
atPos(vd.pos.focus) {
val pa = if (vd.hasFlag(PRIVATE | LOCAL)) 0L else PARAMACCESSOR
ValDef(
- Modifiers(vd.mods.flags & (IMPLICIT | DEFAULTPARAM) | PARAM | pa) withAnnotations vd.mods.annotations,
+ Modifiers(vd.mods.flags & (IMPLICIT | DEFAULTPARAM | BYNAMEPARAM) | PARAM | pa) withAnnotations vd.mods.annotations,
vd.name, vd.tpt.duplicate, vd.rhs.duplicate)
}})
val (edefs, rest) = body span treeInfo.isEarlyDef
@@ -669,119 +267,15 @@ trait Trees {
Template(parents, self, gvdefs ::: vparamss2.flatten ::: constrs ::: etdefs ::: rest)
}
- /** Block of expressions (semicolon separated expressions) */
- case class Block(stats: List[Tree], expr: Tree)
- extends TermTree
-
- /** Case clause in a pattern match, eliminated by TransMatch
- * (except for occurences in switch statements)
- */
- case class CaseDef(pat: Tree, guard: Tree, body: Tree)
- extends Tree
-
/** casedef shorthand */
def CaseDef(pat: Tree, body: Tree): CaseDef = CaseDef(pat, EmptyTree, body)
- /** Alternatives of patterns, eliminated by TransMatch, except for
- * occurences in encoded Switch stmt (=remaining Match(CaseDef(...))
- */
- case class Alternative(trees: List[Tree])
- extends TermTree
-
- /** Repetition of pattern, eliminated by TransMatch */
- case class Star(elem: Tree)
- extends TermTree
-
- /** Bind of a variable to a rhs pattern, eliminated by TransMatch
- *
- * @param name
- * @param body
- */
- case class Bind(name: Name, body: Tree)
- extends DefTree {
- override def isTerm = name.isTermName
- override def isType = name.isTypeName
- }
-
def Bind(sym: Symbol, body: Tree): Bind =
Bind(sym.name, body) setSymbol sym
- case class UnApply(fun: Tree, args: List[Tree])
- extends TermTree
-
- /** Array of expressions, needs to be translated in backend,
- */
- case class ArrayValue(elemtpt: Tree, elems: List[Tree])
- extends TermTree
-
- /** Anonymous function, eliminated by analyzer */
- case class Function(vparams: List[ValDef], body: Tree)
- extends TermTree with SymTree
- // The symbol of a Function is a synthetic value of name nme.ANON_FUN_NAME
- // It is the owner of the function's parameters.
-
- /** Assignment */
- case class Assign(lhs: Tree, rhs: Tree)
- extends TermTree
-
- /** Either an assignment or a named argument. Only appears in argument lists,
- * eliminated by typecheck (doTypedApply)
- */
- case class AssignOrNamedArg(lhs: Tree, rhs: Tree)
- extends TermTree
-
- /** Conditional expression */
- case class If(cond: Tree, thenp: Tree, elsep: Tree)
- extends TermTree
-
- /** <p>
- * Pattern matching expression (before <code>TransMatch</code>)
- * Switch statements (after TransMatch)
- * </p>
- * <p>
- * After <code>TransMatch</code>, cases will satisfy the following
- * constraints:
- * </p>
- * <ul>
- * <li>all guards are EmptyTree,</li>
- * <li>all patterns will be either <code>Literal(Constant(x:Int))</code>
- * or <code>Alternative(lit|...|lit)</code></li>
- * <li>except for an "otherwise" branch, which has pattern
- * <code>Ident(nme.WILDCARD)</code></li>
- * </ul>
- */
- case class Match(selector: Tree, cases: List[CaseDef])
- extends TermTree
-
- /** Return expression */
- case class Return(expr: Tree)
- extends TermTree with SymTree
- // The symbol of a Return node is the enclosing method.
- case class Try(block: Tree, catches: List[CaseDef], finalizer: Tree)
- extends TermTree
-
- /** Throw expression */
- case class Throw(expr: Tree)
- extends TermTree
-
- /** Object instantiation
- * One should always use factory method below to build a user level new.
- *
- * @param tpt a class type
- */
- case class New(tpt: Tree)
- extends TermTree {
- assert(tpt.isType)
- }
-
- /** Factory method for object creation <code>&lt;new tpt(args_1)...(args_n)&gt;</code>.
- * A New(t, as) is expanded to:
- * (new t).<init>(as)
- *
- * @param tpt ...
- * @param argss ...
- * @return ...
+ /** Factory method for object creation `new tpt(args_1)...(args_n)`
+ * A `New(t, as)` is expanded to: `(new t).<init>(as)`
*/
def New(tpt: Tree, argss: List[List[Tree]]): Tree = {
assert(!argss.isEmpty)
@@ -789,278 +283,62 @@ trait Trees {
(superRef /: argss) (Apply)
}
- /** Type annotation, eliminated by explicit outer */
- case class Typed(expr: Tree, tpt: Tree)
- extends TermTree
-
- // Martin to Sean: Should GenericApply/TypeApply/Apply not be SymTree's? After all,
- // ApplyDynamic is a SymTree.
- abstract class GenericApply extends TermTree {
- val fun: Tree
- val args: List[Tree]
- }
-
- /** Type application */
- case class TypeApply(fun: Tree, args: List[Tree])
- extends GenericApply {
- override def symbol: Symbol = fun.symbol
- override def symbol_=(sym: Symbol) { fun.symbol = sym }
- }
-
- /** Value application */
- case class Apply(fun: Tree, args: List[Tree])
- extends GenericApply {
- override def symbol: Symbol = fun.symbol
- override def symbol_=(sym: Symbol) { fun.symbol = sym }
- }
-
- /** Dynamic value application.
- * In a dynamic application q.f(as)
- * - q is stored in qual
- * - as is stored in args
- * - f is stored as the node's symbol field.
- */
- case class ApplyDynamic(qual: Tree, args: List[Tree])
- extends TermTree with SymTree
- // The symbol of an ApplyDynamic is the function symbol of `qual', or NoSymbol, if there is none.
-
- /** Super reference */
- case class Super(qual: Name, mix: Name)
- extends TermTree with SymTree
- // The symbol of a Super is the class _from_ which the super reference is made.
- // For instance in C.super(...), it would be C.
-
def Super(sym: Symbol, mix: Name): Tree = Super(sym.name, mix) setSymbol sym
- /** Self reference */
- case class This(qual: Name)
- extends TermTree with SymTree
- // The symbol of a This is the class to which the this refers.
- // For instance in C.this, it would be C.
-
def This(sym: Symbol): Tree = This(sym.name) setSymbol sym
- /** Designator <qualifier> . <name> */
- case class Select(qualifier: Tree, name: Name)
- extends RefTree {
- override def isTerm = name.isTermName
- override def isType = name.isTypeName
- }
-
def Select(qualifier: Tree, sym: Symbol): Select =
Select(qualifier, sym.name) setSymbol sym
- /** Identifier <name> */
- case class Ident(name: Name)
- extends RefTree {
- override def isTerm = name.isTermName
- override def isType = name.isTypeName
- }
-
- class BackQuotedIdent(name: Name) extends Ident(name)
-
def Ident(sym: Symbol): Ident =
Ident(sym.name) setSymbol sym
- /** Literal */
- case class Literal(value: Constant)
- extends TermTree {
- assert(value ne null)
- }
-
- def Literal(value: Any): Literal =
- Literal(Constant(value))
-
/** A synthetic term holding an arbitrary type. Not to be confused with
* TypTree, the trait for trees that are only used for type trees.
* TypeTree's are inserted in several places, but most notably in
* <code>RefCheck</code>, where the arbitrary type trees are all replaced by
* TypeTree's. */
- case class TypeTree() extends TypTree {
- override def symbol = if (tpe == null) null else tpe.typeSymbol
-
+ case class TypeTree() extends AbsTypeTree {
private var orig: Tree = null
- private var wasEmpty: Boolean = false
+ private[Trees] var wasEmpty: Boolean = false
def original: Tree = orig
-
def setOriginal(tree: Tree): this.type = { orig = tree; setPos(tree.pos); this }
override def defineType(tp: Type): this.type = {
wasEmpty = isEmpty
setType(tp)
}
-
- /** Reset type to null, unless type original was empty and then
- * got its type via a defineType
- */
- override def resetType() {
- if (wasEmpty) tpe = null
- }
-
- override def isEmpty = (tpe eq null) || tpe == NoType
- }
-
- def TypeTree(tp: Type): TypeTree = TypeTree() setType tp
- // def TypeTree(tp: Type, tree : Tree): TypeTree = TypeTree(tree) setType tp
-
- /** A tree that has an annotation attached to it. Only used for annotated types and
- * annotation ascriptions, annotations on definitions are stored in the Modifiers.
- * Eliminated by typechecker (typedAnnotated), the annotations are then stored in
- * an AnnotatedType.
- */
- case class Annotated(annot: Tree, arg: Tree) extends Tree {
- override def isType = arg.isType
- override def isTerm = arg.isTerm
}
- /** Singleton type, eliminated by RefCheck */
- case class SingletonTypeTree(ref: Tree)
- extends TypTree
-
- /** Type selection <qualifier> # <name>, eliminated by RefCheck */
- case class SelectFromTypeTree(qualifier: Tree, name: Name)
- extends TypTree with RefTree
+ object TypeTree extends TypeTreeExtractor
- /** Intersection type <parent1> with ... with <parentN> { <decls> }, eliminated by RefCheck */
- case class CompoundTypeTree(templ: Template)
- extends TypTree
+ def TypeTree(tp: Type): TypeTree = TypeTree() setType tp
- /** Applied type <tpt> [ <args> ], eliminated by RefCheck */
- case class AppliedTypeTree(tpt: Tree, args: List[Tree])
- extends TypTree {
- override def symbol: Symbol = tpt.symbol
- override def symbol_=(sym: Symbol) { tpt.symbol = sym }
+ /** Documented definition, eliminated by analyzer */
+ case class DocDef(comment: DocComment, definition: Tree)
+ extends Tree {
+ override def symbol: Symbol = definition.symbol
+ override def symbol_=(sym: Symbol) { definition.symbol = sym }
+ // sean: seems to be important to the IDE
+ override def isDef = definition.isDef
}
- case class TypeBoundsTree(lo: Tree, hi: Tree)
- extends TypTree
-
- case class ExistentialTypeTree(tpt: Tree, whereClauses: List[Tree])
- extends TypTree
+ /** Either an assignment or a named argument. Only appears in argument lists,
+ * eliminated by typecheck (doTypedApply)
+ */
+ case class AssignOrNamedArg(lhs: Tree, rhs: Tree)
+ extends TermTree
case class Parens(args: List[Tree]) extends Tree // only used during parsing
- /** Array selection <qualifier> . <name> only used during erasure */
- case class SelectFromArray(qualifier: Tree, name: Name, erasure: Type)
- extends TermTree with RefTree
+// ----- subconstructors --------------------------------------------
- trait StubTree extends Tree {
- def underlying : AnyRef
- override def equalsStructure0(that: Tree)(f : (Tree,Tree) => Boolean): Boolean = this eq that
- }
+ class ApplyToImplicitArgs(fun: Tree, args: List[Tree]) extends Apply(fun, args)
+
+ class ApplyImplicitView(fun: Tree, args: List[Tree]) extends Apply(fun, args)
-/* A standard pattern match
- case EmptyTree =>
- case PackageDef(pid, stats) =>
- // package pid { stats }
- case ClassDef(mods, name, tparams, impl) =>
- // mods class name [tparams] impl where impl = extends parents { defs }
- case ModuleDef(mods, name, impl) => (eliminated by refcheck)
- // mods object name impl where impl = extends parents { defs }
- case ValDef(mods, name, tpt, rhs) =>
- // mods val name: tpt = rhs
- // note missing type information is expressed by tpt = TypeTree()
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- // mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs
- // note missing type information is expressed by tpt = TypeTree()
- case TypeDef(mods, name, tparams, rhs) => (eliminated by erasure)
- // mods type name[tparams] = rhs
- // mods type name[tparams] >: lo <: hi, where lo, hi are in a TypeBoundsTree,
- and DEFERRED is set in mods
- case LabelDef(name, params, rhs) =>
- // used for tailcalls and like
- // while/do are desugared to label defs as follows:
- // while (cond) body ==> LabelDef($L, List(), if (cond) { body; L$() } else ())
- // do body while (cond) ==> LabelDef($L, List(), body; if (cond) L$() else ())
- case Import(expr, selectors) => (eliminated by typecheck)
- // import expr.{selectors}
- // Selectors are a list of pairs of names (from, to).
- // The last (and maybe only name) may be a nme.WILDCARD
- // for instance
- // import qual.{x, y => z, _} would be represented as
- // Import(qual, List(("x", "x"), ("y", "z"), (WILDCARD, null)))
- case DocDef(comment, definition) => (eliminated by typecheck)
- // /** comment */ definition
- case Template(parents, self, body) =>
- // extends parents { self => body }
- // if self is missing it is represented as emptyValDef
- case Block(stats, expr) =>
- // { stats; expr }
- case CaseDef(pat, guard, body) => (eliminated by transmatch/explicitouter)
- // case pat if guard => body
- case Alternative(trees) => (eliminated by transmatch/explicitouter)
- // pat1 | ... | patn
- case Star(elem) => (eliminated by transmatch/explicitouter)
- // pat*
- case Bind(name, body) => (eliminated by transmatch/explicitouter)
- // name @ pat
- case UnApply(fun: Tree, args) (introduced by typer, eliminated by transmatch/explicitouter)
- // used for unapply's
- case ArrayValue(elemtpt, trees) => (introduced by uncurry)
- // used to pass arguments to vararg arguments
- // for instance, printf("%s%d", foo, 42) is translated to after uncurry to:
- // Apply(
- // Ident("printf"),
- // Literal("%s%d"),
- // ArrayValue(<Any>, List(Ident("foo"), Literal(42))))
- case Function(vparams, body) => (eliminated by lambdaLift)
- // vparams => body where vparams:List[ValDef]
- case Assign(lhs, rhs) =>
- // lhs = rhs
- case AssignOrNamedArg(lhs, rhs) => (eliminated by typecheck)
- // lhs = rhs
- case If(cond, thenp, elsep) =>
- // if (cond) thenp else elsep
- case Match(selector, cases) =>
- // selector match { cases }
- case Return(expr) =>
- // return expr
- case Try(block, catches, finalizer) =>
- // try block catch { catches } finally finalizer where catches: List[CaseDef]
- case Throw(expr) =>
- // throw expr
- case New(tpt) =>
- // new tpt always in the context: (new tpt).<init>[targs](args)
- case Typed(expr, tpt) => (eliminated by erasure)
- // expr: tpt
- case TypeApply(fun, args) =>
- // fun[args]
- case Apply(fun, args) =>
- // fun(args)
- // for instance fun[targs](args) is expressed as Apply(TypeApply(fun, targs), args)
- case ApplyDynamic(qual, args) (introduced by erasure, eliminated by cleanup)
- // fun(args)
- case Super(qual, mix) =>
- // qual.super[mix] if qual and/or mix is empty, ther are nme.EMPTY.toTypeName
- case This(qual) =>
- // qual.this
- case Select(qualifier, selector) =>
- // qualifier.selector
- case Ident(name) =>
- // name
- // note: type checker converts idents that refer to enclosing fields or methods
- // to selects; name ==> this.name
- case Literal(value) =>
- // value
- case TypeTree() => (introduced by refcheck)
- // a type that's not written out, but given in the tpe attribute
- case Annotated(annot, arg) => (eliminated by typer)
- // arg @annot for types, arg: @annot for exprs
- case SingletonTypeTree(ref) => (eliminated by uncurry)
- // ref.type
- case SelectFromTypeTree(qualifier, selector) => (eliminated by uncurry)
- // qualifier # selector, a path-dependent type p.T is expressed as p.type # T
- case CompoundTypeTree(templ: Template) => (eliminated by uncurry)
- // parent1 with ... with parentN { refinement }
- case AppliedTypeTree(tpt, args) => (eliminated by uncurry)
- // tpt[args]
- case TypeBoundsTree(lo, hi) => (eliminated by uncurry)
- // >: lo <: hi
- case ExistentialTypeTree(tpt, whereClauses) => (eliminated by uncurry)
- // tpt forSome { whereClauses }
-
-*/
+// ----- auxiliary objects and methods ------------------------------
abstract class TreeCopier {
def ClassDef(tree: Tree, mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template): ClassDef
@@ -1167,7 +445,11 @@ trait Trees {
def TypeApply(tree: Tree, fun: Tree, args: List[Tree]) =
new TypeApply(fun, args).copyAttrs(tree)
def Apply(tree: Tree, fun: Tree, args: List[Tree]) =
- new Apply(fun, args).copyAttrs(tree)
+ (tree match {
+ case _: ApplyToImplicitArgs => new ApplyToImplicitArgs(fun, args)
+ case _: ApplyImplicitView => new ApplyImplicitView(fun, args)
+ case _ => new Apply(fun, args)
+ }).copyAttrs(tree)
def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]) =
new ApplyDynamic(qual, args).copyAttrs(tree)
def Super(tree: Tree, qual: Name, mix: Name) =
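
The copier change above dispatches on the concrete Apply subclass so a copied ApplyToImplicitArgs does not decay into a plain Apply. A toy illustration of why the match is needed (simplified classes, not the real copier):

object CopyPreservesSubtypeSketch {
  class Apply(val fun: String, val args: List[String])
  class ApplyToImplicitArgs(fun: String, args: List[String]) extends Apply(fun, args)
  class ApplyImplicitView(fun: String, args: List[String])   extends Apply(fun, args)

  // A naive copy would always produce a bare Apply and lose the marker subclass;
  // dispatching on the original keeps that information for later phases.
  def copyApply(orig: Apply, fun: String, args: List[String]): Apply = orig match {
    case _: ApplyToImplicitArgs => new ApplyToImplicitArgs(fun, args)
    case _: ApplyImplicitView   => new ApplyImplicitView(fun, args)
    case _                      => new Apply(fun, args)
  }
}
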
@@ -1544,10 +826,6 @@ trait Trees {
treeCopy.ExistentialTypeTree(tree, transform(tpt), transformTrees(whereClauses))
case SelectFromArray(qualifier, selector, erasure) =>
treeCopy.SelectFromArray(tree, transform(qualifier), selector, erasure)
- case tree : StubTree =>
- tree.symbol = NoSymbol
- tree.tpe = null
- tree
}
def transformTrees(trees: List[Tree]): List[Tree] =
@@ -1583,142 +861,50 @@ trait Trees {
}
}
- class Traverser {
- protected var currentOwner: Symbol = definitions.RootClass
- def traverse(tree: Tree): Unit = tree match {
- case EmptyTree =>
- ;
- case PackageDef(pid, stats) =>
- traverse(pid)
- atOwner(tree.symbol.moduleClass) {
- traverseTrees(stats)
- }
- case ClassDef(mods, name, tparams, impl) =>
- atOwner(tree.symbol) {
- traverseTrees(mods.annotations); traverseTrees(tparams); traverse(impl)
- }
- case ModuleDef(mods, name, impl) =>
- atOwner(tree.symbol.moduleClass) {
- traverseTrees(mods.annotations); traverse(impl)
- }
- case ValDef(mods, name, tpt, rhs) =>
- atOwner(tree.symbol) {
- traverseTrees(mods.annotations); traverse(tpt); traverse(rhs)
- }
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- atOwner(tree.symbol) {
- traverseTrees(mods.annotations); traverseTrees(tparams); traverseTreess(vparamss); traverse(tpt); traverse(rhs)
- }
- case TypeDef(mods, name, tparams, rhs) =>
- atOwner(tree.symbol) {
- traverseTrees(mods.annotations); traverseTrees(tparams); traverse(rhs)
- }
- case LabelDef(name, params, rhs) =>
- traverseTrees(params); traverse(rhs)
- case Import(expr, selectors) =>
- traverse(expr)
- case Annotated(annot, arg) =>
- traverse(annot); traverse(arg)
- case DocDef(comment, definition) =>
- traverse(definition)
- case Template(parents, self, body) =>
- traverseTrees(parents)
- if (!self.isEmpty) traverse(self)
- traverseStats(body, tree.symbol)
- case Block(stats, expr) =>
- traverseTrees(stats); traverse(expr)
- case CaseDef(pat, guard, body) =>
- traverse(pat); traverse(guard); traverse(body)
- case Alternative(trees) =>
- traverseTrees(trees)
- case Star(elem) =>
- traverse(elem)
- case Bind(name, body) =>
- traverse(body)
- case UnApply(fun, args) =>
- traverse(fun); traverseTrees(args)
- case ArrayValue(elemtpt, trees) =>
- traverse(elemtpt); traverseTrees(trees)
- case Function(vparams, body) =>
- atOwner(tree.symbol) {
- traverseTrees(vparams); traverse(body)
- }
- case Assign(lhs, rhs) =>
- traverse(lhs); traverse(rhs)
+ class Traverser extends super.Traverser {
+ /** Compiler specific tree types are handled here: the remainder are in
+ * the library's abstract tree traverser.
+ */
+ override def traverse(tree: Tree): Unit = tree match {
case AssignOrNamedArg(lhs, rhs) =>
traverse(lhs); traverse(rhs)
- case If(cond, thenp, elsep) =>
- traverse(cond); traverse(thenp); traverse(elsep)
- case Match(selector, cases) =>
- traverse(selector); traverseTrees(cases)
- case Return(expr) =>
- traverse(expr)
- case Try(block, catches, finalizer) =>
- traverse(block); traverseTrees(catches); traverse(finalizer)
- case Throw(expr) =>
- traverse(expr)
- case New(tpt) =>
- traverse(tpt)
- case Typed(expr, tpt) =>
- traverse(expr); traverse(tpt)
- case TypeApply(fun, args) =>
- traverse(fun); traverseTrees(args)
- case Apply(fun, args) =>
- traverse(fun); traverseTrees(args)
- case ApplyDynamic(qual, args) =>
- traverse(qual); traverseTrees(args)
- case Super(_, _) =>
- ;
- case This(_) =>
- ;
- case Select(qualifier, selector) =>
- traverse(qualifier)
- case Ident(_) =>
- ;
- case Literal(_) =>
- ;
- case TypeTree() =>
- ;
- case SingletonTypeTree(ref) =>
- traverse(ref)
- case SelectFromTypeTree(qualifier, selector) =>
- traverse(qualifier)
- case CompoundTypeTree(templ) =>
- traverse(templ)
- case AppliedTypeTree(tpt, args) =>
- traverse(tpt); traverseTrees(args)
- case TypeBoundsTree(lo, hi) =>
- traverse(lo); traverse(hi)
- case ExistentialTypeTree(tpt, whereClauses) =>
- traverse(tpt); traverseTrees(whereClauses)
- case SelectFromArray(qualifier, selector, erasure) =>
- traverse(qualifier)
+ case DocDef(comment, definition) =>
+ traverse(definition)
case Parens(ts) =>
traverseTrees(ts)
- case tree : StubTree =>
+ case _ => super.traverse(tree)
}
- def traverseTrees(trees: List[Tree]) {
- trees foreach traverse
- }
- def traverseTreess(treess: List[List[Tree]]) {
- treess foreach traverseTrees
- }
- def traverseStats(stats: List[Tree], exprOwner: Symbol) {
+ /** The abstract traverser is not aware of Tree.isTerm, so we override this one.
+ */
+ override def traverseStats(stats: List[Tree], exprOwner: Symbol) {
stats foreach (stat =>
if (exprOwner != currentOwner && stat.isTerm) atOwner(exprOwner)(traverse(stat))
- else traverse(stat))
+ else traverse(stat)
+ )
}
+
+ /** Leave apply available in the generic traverser to do something else.
+ */
def apply[T <: Tree](tree: T): T = { traverse(tree); tree }
+ }
- def atOwner(owner: Symbol)(traverse: => Unit) {
- val prevOwner = currentOwner
- currentOwner = owner
- traverse
- currentOwner = prevOwner
+ private lazy val duplicator = new Transformer {
+ override val treeCopy = new StrictTreeCopier
+ override def transform(t: Tree) = {
+ val t1 = super.transform(t)
+ if ((t1 ne t) && t1.pos.isRange) t1 setPos t.pos.focus
+ t1
}
}
+ private class ShallowDuplicator(orig: Tree) extends Transformer {
+ override val treeCopy = new StrictTreeCopier
+ override def transform(tree: Tree) =
+ if (tree eq orig) super.transform(tree)
+ else tree
+ }
+
class TreeSubstituter(from: List[Symbol], to: List[Tree]) extends Transformer {
override def transform(tree: Tree): Tree = tree match {
case Ident(_) =>
@@ -1854,7 +1040,14 @@ trait Trees {
case _ =>
if (tree.hasSymbol && isLocal(tree.symbol)) tree.symbol = NoSymbol
}
- tree.resetType()
+ tree match {
+ case tpt: TypeTree =>
+ if (tpt.wasEmpty) tree.tpe = null
+ case EmptyTree =>
+ ;
+ case _ =>
+ tree.tpe = null
+ }
super.traverse(tree)
}
}
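
The replacement for resetType() distinguishes three cases: inferred TypeTrees (wasEmpty) lose their type, EmptyTree is left untouched, and every other node is reset. A small sketch of that rule over a toy tree:

object ResetTypeSketch {
  sealed trait Tree { var tpe: String = null }
  case object EmptyTree                  extends Tree
  case class TypeTree(wasEmpty: Boolean) extends Tree
  case class Ident(name: String)         extends Tree

  // Only inferred TypeTrees lose their type; explicitly written ones keep it,
  // EmptyTree is left alone, and every other node is reset.
  def resetType(tree: Tree): Unit = tree match {
    case tpt: TypeTree => if (tpt.wasEmpty) tpt.tpe = null
    case EmptyTree     => ()
    case _             => tree.tpe = null
  }
}
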
@@ -1876,14 +1069,5 @@ trait Trees {
super.traverse(tree)
}
}
-
- /* hook to memoize trees in IDE */
- trait TreeKind {
- def isType : Boolean
- def isTerm : Boolean
- def isDef : Boolean
- def hasSymbol : Boolean
- def isTop : Boolean
- }
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index 1f17f148aa..e918489446 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -9,12 +9,11 @@ package ast.parser
import scala.collection.mutable
import mutable.{ Buffer, ArrayBuffer, ListBuffer, HashMap }
-import scala.util.control.ControlException
-import scala.tools.nsc.util.{Position,NoPosition,SourceFile,CharArrayReader}
+import scala.util.control.ControlThrowable
+import scala.tools.nsc.util.{SourceFile,CharArrayReader}
import scala.xml.{ Text, TextBuffer }
import scala.xml.Utility.{ isNameStart, isNameChar, isSpace }
import util.Chars.{ SU, LF }
-import scala.annotation.switch
// XXX/Note: many/most of the functions in here are almost direct cut and pastes
// from another file - scala.xml.parsing.MarkupParser, it looks like.
@@ -36,18 +35,15 @@ trait MarkupParsers
{
self: Parsers =>
- type PositionType = Position
- type InputType = CharArrayReader
-
- case object MissingEndTagException extends RuntimeException with ControlException {
+ case object MissingEndTagControl extends ControlThrowable {
override def getMessage = "start tag was here: "
}
- case object ConfusedAboutBracesException extends RuntimeException with ControlException {
+ case object ConfusedAboutBracesControl extends ControlThrowable {
override def getMessage = " I encountered a '}' where I didn't expect one, maybe this tag isn't closed <"
}
- case object TruncatedXML extends RuntimeException with ControlException {
+ case object TruncatedXMLControl extends ControlThrowable {
override def getMessage = "input ended while parsing XML"
}
@@ -58,10 +54,18 @@ trait MarkupParsers
import Tokens.{ EMPTY, LBRACE, RBRACE }
type PositionType = Position
+ type InputType = CharArrayReader
+ type ElementType = Tree
+ type AttributesType = mutable.Map[String, Tree]
+ type NamespaceType = Any // namespaces ignored
+
+ def mkAttributes(name: String, other: NamespaceType): AttributesType = xAttributes
+
val eof = false
+ def truncatedError(msg: String): Nothing = throw TruncatedXMLControl
def xHandleError(that: Char, msg: String) =
- if (ch == SU) throw TruncatedXML
+ if (ch == SU) throw TruncatedXMLControl
else reportSyntaxError(msg)
var input : CharArrayReader = _
@@ -75,15 +79,12 @@ trait MarkupParsers
def ch = input.ch
   /** this method assigns the next character to ch and advances the input */
def nextch = { val result = input.ch; input.nextChar(); result }
+ def ch_returning_nextch = nextch
- var xEmbeddedBlock = false
+ def mkProcInstr(position: Position, name: String, text: String): Tree =
+ parser.symbXMLBuilder.procInstr(position, name, text)
- /** Execute body with a variable saved and restored after execution */
- def saving[A,B](getter: A, setter: (A) => Unit)(body: => B): B = {
- val saved = getter
- try body
- finally setter(saved)
- }
+ var xEmbeddedBlock = false
private var debugLastStartElement = new mutable.Stack[(Int, String)]
private def debugLastPos = debugLastStartElement.top._1
@@ -91,15 +92,11 @@ trait MarkupParsers
private def errorBraces() = {
reportSyntaxError("in XML content, please use '}}' to express '}'")
- throw ConfusedAboutBracesException
+ throw ConfusedAboutBracesControl
}
- private def errorNoEnd(tag: String) = {
+ def errorNoEnd(tag: String) = {
reportSyntaxError("expected closing tag of " + tag)
- throw MissingEndTagException
- }
- private def errorAndResult[T](msg: String, x: T): T = {
- reportSyntaxError(msg)
- x
+ throw MissingEndTagControl
}
/** checks whether next character starts a Scala block, if yes, skip it.
@@ -128,9 +125,7 @@ trait MarkupParsers
val mid = curOffset
val value: Tree = ch match {
case '"' | '\'' =>
- nextch
- val tmp = xAttributeValue(delim)
- nextch
+ val tmp = xAttributeValue(ch_returning_nextch)
try handle.parseAttribute(r2p(start, mid, curOffset), tmp)
catch {
@@ -142,7 +137,7 @@ trait MarkupParsers
nextch
xEmbeddedExpr
case SU =>
- throw TruncatedXML
+ throw TruncatedXMLControl
case _ =>
errorAndResult("' or \" delimited attribute value or '{' scala-expr '}' expected", Literal(Constant("<syntax-error>")))
}
@@ -157,43 +152,6 @@ trait MarkupParsers
aMap
}
- /** attribute value, terminated by either ' or ". value may not contain <.
- * @param endch either ' or "
- */
- def xAttributeValue(endCh: Char): String = {
- val buf = new StringBuilder
- while (ch != endCh) {
- // well-formedness constraint
- if (ch == '<') return errorAndResult("'<' not allowed in attrib value", "")
- else if (ch == SU) throw TruncatedXML
- else buf append nextch
- }
- // @todo: normalize attribute value
- buf.toString
- }
-
- /** parse a start or empty tag.
- * [40] STag ::= '<' Name { S Attribute } [S]
- * [44] EmptyElemTag ::= '<' Name { S Attribute } [S]
- */
- def xTag: (String, mutable.Map[String, Tree]) = {
- val elemName = xName
- xSpaceOpt
-
- (elemName, xAttributes)
- }
-
- /** [42] '<' xmlEndTag ::= '<' '/' Name S? '>'
- */
- def xEndTag(startName: String) {
- xToken('/')
- if (xName != startName)
- errorNoEnd(startName)
-
- xSpaceOpt
- xToken('>')
- }
-
/** '<! CharData ::= [CDATA[ ( {char} - {char}"]]>"{char} ) ']]>'
*
* see [15]
@@ -210,36 +168,6 @@ trait MarkupParsers
xTakeUntil(handle.unparsed, () => r2p(start, start, curOffset), "</xml:unparsed>")
}
- /** CharRef ::= "&#" '0'..'9' {'0'..'9'} ";"
- * | "&#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
- *
- * see [66]
- */
- def xCharRef: String = {
- val hex = (ch == 'x') && { nextch; true }
- val base = if (hex) 16 else 10
- var i = 0
- while (ch != ';') {
- (ch: @switch) match {
- case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
- i = i * base + ch.asDigit
- case 'a' | 'b' | 'c' | 'd' | 'e' | 'f'
- | 'A' | 'B' | 'C' | 'D' | 'E' | 'F' =>
- if (!hex)
- reportSyntaxError("hex char not allowed in decimal char ref\n"
- +"Did you mean to write &#x ?");
- else
- i = i * base + ch.asDigit
- case SU =>
- throw TruncatedXML
- case _ =>
- reportSyntaxError("character '"+ch+"' not allowed in char ref")
- }
- nextch
- }
- i.toChar.toString
- }
-
/** Comment ::= '<!--' ((Char - '-') | ('-' (Char - '-')))* '-->'
*
* see [15]
@@ -334,7 +262,7 @@ trait MarkupParsers
*/
def element: Tree = {
val start = curOffset
- val (qname, attrMap) = xTag
+ val (qname, attrMap) = xTag(())
if (ch == '/') { // empty element
xToken("/>")
handle.element(r2p(start, start, curOffset), qname, attrMap, new ListBuffer[Tree])
@@ -356,42 +284,6 @@ trait MarkupParsers
}
}
- /** actually, Name ::= (Letter | '_' | ':') (NameChar)* but starting with ':' cannot happen
- * Name ::= (Letter | '_') (NameChar)*
- *
- * see [5] of XML 1.0 specification
- *
- * pre-condition: ch != ':' // assured by definition of XMLSTART token
- * post-condition: name does neither start, nor end in ':'
- */
- def xName: String = {
- if (ch == SU) throw TruncatedXML
- else if (!isNameStart(ch))
- return errorAndResult("name expected, but char '%s' cannot start a name" format ch, "")
-
- val buf = new StringBuilder
-
- do buf append nextch
- while (isNameChar(ch))
-
- if (buf.last == ':') {
- reportSyntaxError( "name cannot end in ':'" )
- buf setLength (buf.length - 1)
- }
- buf.toString.intern
- }
-
- /** '<?' ProcInstr ::= Name [S ({Char} - ({Char}'>?' {Char})]'?>'
- *
- * see [15]
- */
- // <?xml2 version="1.0" encoding="UTF-8" standalone="yes"?>
- def xProcInstr: Tree = {
- val n = xName
- xSpaceOpt
- xTakeUntil(handle.procInstr(_: Position, n, _:String), () => tmppos, "?>")
- }
-
/** parse character data.
* precondition: xEmbeddedBlock == false (we are not in a scala block)
*/
@@ -415,23 +307,21 @@ trait MarkupParsers
}
/** Some try/catch/finally logic used by xLiteral and xLiteralPattern. */
- private def xLiteralCommon(f: () => Tree, ifTruncated: Exception => Unit): Tree =
- try f()
+ private def xLiteralCommon(f: () => Tree, ifTruncated: String => Unit): Tree = {
+ try return f()
catch {
- case ex: RuntimeException =>
- ex match {
- case c @ TruncatedXML =>
- ifTruncated(c)
- case c @ (MissingEndTagException | ConfusedAboutBracesException) =>
- parser.syntaxError(debugLastPos, c.getMessage + debugLastElem + ">")
- case _: ArrayIndexOutOfBoundsException =>
- parser.syntaxError(debugLastPos, "missing end tag in XML literal for <%s>" format debugLastElem)
- case _ => throw ex
- }
- EmptyTree
+ case c @ TruncatedXMLControl =>
+ ifTruncated(c.getMessage)
+ case c @ (MissingEndTagControl | ConfusedAboutBracesControl) =>
+ parser.syntaxError(debugLastPos, c.getMessage + debugLastElem + ">")
+ case _: ArrayIndexOutOfBoundsException =>
+ parser.syntaxError(debugLastPos, "missing end tag in XML literal for <%s>" format debugLastElem)
}
finally parser.in resume Tokens.XMLSTART
+ EmptyTree
+ }
+
/** Use a lookahead parser to run speculative body, and return the first char afterward. */
private def charComingAfter(body: => Unit): Char = {
input = input.lookaheadReader
@@ -469,7 +359,7 @@ trait MarkupParsers
ts(0)
}
},
- ex => parser.incompleteInputError(ex.getMessage)
+ msg => parser.incompleteInputError(msg)
)
/** @see xmlPattern. resynchronizes after successful parse
@@ -485,7 +375,7 @@ trait MarkupParsers
tree
}
},
- ex => parser.syntaxError(curOffset, ex.getMessage)
+ msg => parser.syntaxError(curOffset, msg)
)
def escapeToScala[A](op: => A, kind: String) = {
@@ -544,7 +434,7 @@ trait MarkupParsers
assert(!xEmbeddedBlock, "problem with embedded block")
case SU =>
- throw TruncatedXML
+ throw TruncatedXMLControl
case _ => // text
appendText(r2p(start1, start1, curOffset), ts, xText)
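The exception case objects in this file were switched from `RuntimeException with ControlException` to `ControlThrowable`, the standard marker for exceptions thrown purely for control flow: it extends `Throwable` directly (so a generic `case e: Exception` handler will not swallow it) and mixes in `NoStackTrace`, making the throw cheap. A small REPL-style sketch of the pattern; `Truncated` and `parseAll` are invented for the example, playing the role of `TruncatedXMLControl` above:

```scala
import scala.util.control.ControlThrowable

case object Truncated extends ControlThrowable {
  override def getMessage = "input ended unexpectedly"
}

def parseAll(tokens: List[String]): Either[String, List[String]] =
  try Right(tokens map { t => if (t.isEmpty) throw Truncated else t.toUpperCase })
  catch { case Truncated => Left(Truncated.getMessage) }

assert(parseAll(List("a", "b")) == Right(List("A", "B")))
assert(parseAll(List("a", ""))  == Left("input ended unexpectedly"))
```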
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 70d8c5d23f..1cfee481bc 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -10,7 +10,7 @@ package scala.tools.nsc
package ast.parser
import scala.collection.mutable.ListBuffer
-import scala.tools.nsc.util.{Position, OffsetPosition, NoPosition, BatchSourceFile}
+import scala.tools.nsc.util.{OffsetPosition, BatchSourceFile}
import symtab.Flags
import Tokens._
@@ -350,8 +350,11 @@ self =>
/** Check that type parameter is not by name T* */
def checkNotByName(t: Tree) = t match {
- case AppliedTypeTree(Select(_, n), _) if (n == nme.BYNAME_PARAM_CLASS_NAME.toTypeName) =>
- syntaxError(t.pos, "no by-name parameter type allowed here", false)
+ case AppliedTypeTree(Select(_, n), _) =>
+ if (n == nme.BYNAME_PARAM_CLASS_NAME.toTypeName)
+ syntaxError(t.pos, "no by-name parameter type allowed here", false)
+ else if (n == nme.REPEATED_PARAM_CLASS_NAME.toTypeName)
+ syntaxError(t.pos, "no * parameter type allowed here", false)
case _ =>
}
@@ -1104,7 +1107,11 @@ self =>
}
} else if (in.token == MATCH) {
t = atPos(t.pos.startOrPoint, in.skipToken()) {
- Match(stripParens(t), surround(LBRACE, RBRACE)(caseClauses(), Nil))
+ /** For debugging pattern matcher transition issues */
+ if (settings.Ypmatnaive.value)
+ makeSequencedMatch(stripParens(t), surround(LBRACE, RBRACE)(caseClauses(), Nil))
+ else
+ Match(stripParens(t), surround(LBRACE, RBRACE)(caseClauses(), Nil))
}
}
// in order to allow anonymous functions as statements (as opposed to expressions) inside
@@ -1569,7 +1576,7 @@ self =>
/* -------- MODIFIERS and ANNOTATIONS ------------------------------------------- */
- /** Drop `private' modifier when follwed by a qualifier.
+ /** Drop `private' modifier when followed by a qualifier.
   * Contract `abstract' and `override' to ABSOVERRIDE
*/
private def normalize(mods: Modifiers): Modifiers =
@@ -1714,8 +1721,10 @@ self =>
mods = modifiers() | Flags.PARAMACCESSOR
if (mods.hasFlag(Flags.LAZY)) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", false)
if (in.token == VAL) {
+ mods = mods withPosition (in.token, tokenRange(in))
in.nextToken()
} else if (in.token == VAR) {
+ mods = mods withPosition (in.token, tokenRange(in))
mods |= Flags.MUTABLE
in.nextToken()
} else if (!caseParam) {
@@ -1739,6 +1748,10 @@ self =>
in.offset,
(if (mods.hasFlag(Flags.MUTABLE)) "`var'" else "`val'") +
" parameters may not be call-by-name", false)
+ else if (implicitmod != 0)
+ syntaxError(
+ in.offset,
+ "implicit parameters may not be call-by-name", false)
else bynamemod = Flags.BYNAMEPARAM
}
paramType()
@@ -1836,7 +1849,7 @@ self =>
}
val nameOffset = in.offset
val pname =
- (if (in.token == USCORE) { // @M! also allow underscore
+ (if (in.token == USCORE) { // TODO AM: freshName(o2p(in.skipToken()), "_$$"), will need to update test suite
in.nextToken()
nme.WILDCARD
} else ident()).toTypeName
@@ -1892,14 +1905,14 @@ self =>
/** Import ::= import ImportExpr {`,' ImportExpr}
*/
def importClause(): List[Tree] = {
- accept(IMPORT)
- commaSeparated(importExpr())
+ val offset = accept(IMPORT)
+ commaSeparated(importExpr(offset))
}
/** ImportExpr ::= StableId `.' (Id | `_' | ImportSelectors)
* XXX: Hook for IDE
*/
- def importExpr(): Tree = {
+ def importExpr(importOffset: Int): Tree = {
val start = in.offset
var t: Tree = null
if (in.token == THIS) {
@@ -1926,7 +1939,7 @@ self =>
if (in.token == USCORE) {
val uscoreOffset = in.offset
in.nextToken()
- Import(t, List(ImportSelector(nme.WILDCARD, uscoreOffset, null, -1)))
+ Import(t, List(ImportSelector(nme.WILDCARD, uscoreOffset, nme.WILDCARD, -1)))
} else if (in.token == LBRACE) {
Import(t, importSelectors())
} else {
@@ -1943,7 +1956,7 @@ self =>
Import(t, List(ImportSelector(name, nameOffset, name, nameOffset)))
}
}
- atPos(start) { loop() }
+ atPos(importOffset, start) { loop() }
}
/** ImportSelectors ::= `{' {ImportSelector `,'} (ImportSelector | `_') `}'
@@ -2465,11 +2478,15 @@ self =>
val stats = new ListBuffer[Tree]
while (in.token != RBRACE && in.token != EOF) {
if (in.token == PACKAGE) {
- in.flushDoc
val start = in.skipToken()
- stats += {
- if (in.token == OBJECT) makePackageObject(start, objectDef(in.offset, NoMods))
- else packaging(start)
+ stats ++= {
+ if (in.token == OBJECT) {
+ joinComment(List(makePackageObject(start, objectDef(in.offset, NoMods))))
+ }
+ else {
+ in.flushDoc
+ List(packaging(start))
+ }
}
} else if (in.token == IMPORT) {
in.flushDoc
@@ -2627,15 +2644,15 @@ self =>
while (in.token == SEMI) in.nextToken()
val start = in.offset
if (in.token == PACKAGE) {
- in.flushDoc
in.nextToken()
if (in.token == OBJECT) {
- ts += makePackageObject(start, objectDef(in.offset, NoMods))
+ ts ++= joinComment(List(makePackageObject(start, objectDef(in.offset, NoMods))))
if (in.token != EOF) {
acceptStatSep()
ts ++= topStatSeq()
}
} else {
+ in.flushDoc
val pkg = qualId()
newLineOptWhenFollowedBy(LBRACE)
if (in.token == EOF) {
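The strengthened parameter checks in the Parsers.scala hunks above add two parser-level errors: `*` parameter types outside their allowed position, and by-name implicit parameters, alongside the existing rule for `val`/`var` constructor parameters. Illustrative snippets (the error messages quoted are the ones added in the diff; the snippets themselves are not compiler-verified output):

```scala
def ok(x: => Int) = x              // a plain by-name parameter is fine
// class C(val x: => Int)          // error: `val' parameters may not be call-by-name
// def f(implicit x: => Int) = x   // error: implicit parameters may not be call-by-name
```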
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 6cb7c8b99f..eeb3fb6e7a 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -7,12 +7,11 @@ package scala.tools.nsc
package ast.parser
import scala.tools.nsc.util._
-import Chars.{LF, FF, CR, SU}
+import Chars._
import Tokens._
import scala.annotation.switch
import scala.collection.mutable.{ListBuffer, ArrayBuffer}
import scala.xml.Utility.{ isNameStart }
-import util.Chars._
trait Scanners {
val global : Global
@@ -60,7 +59,7 @@ trait Scanners {
def resume(lastCode: Int) = {
token = lastCode
- assert(next.token == EMPTY)
+ assert(next.token == EMPTY || reporter.hasErrors)
nextToken()
}
@@ -318,7 +317,7 @@ trait Scanners {
if (ch == '\"') {
nextChar()
if (ch == '\"') {
- nextChar()
+ nextRawChar()
val saved = lineStartOffset
getMultiLineStringLit()
if (lineStartOffset != saved) // ignore linestarts within a multi-line string
@@ -561,9 +560,9 @@ trait Scanners {
private def getMultiLineStringLit() {
if (ch == '\"') {
- nextChar()
+ nextRawChar()
if (ch == '\"') {
- nextChar()
+ nextRawChar()
if (ch == '\"') {
nextChar()
while (ch == '\"') {
@@ -585,7 +584,7 @@ trait Scanners {
incompleteInputError("unclosed multi-line string literal")
} else {
putChar(ch)
- nextChar()
+ nextRawChar()
getMultiLineStringLit()
}
}
@@ -803,7 +802,7 @@ trait Scanners {
}
/** Parse character literal if current character is followed by \',
- * or follow with given op and return a symol literal token
+ * or follow with given op and return a symbol literal token
*/
def charLitOr(op: () => Unit) {
putChar(ch)
@@ -1015,7 +1014,7 @@ trait Scanners {
class UnitScanner(unit: CompilationUnit, patches: List[BracePatch]) extends Scanner {
def this(unit: CompilationUnit) = this(unit, List())
val buf = unit.source.asInstanceOf[BatchSourceFile].content
- val decodeUnit = !settings.nouescape.value
+ override val decodeUni: Boolean = !settings.nouescape.value
def warning(off: Offset, msg: String) = unit.warning(unit.position(off), msg)
def error (off: Offset, msg: String) = unit.error(unit.position(off), msg)
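The one-character class of bug fixed in `UnitScanner` above (`val decodeUnit` replaced by `override val decodeUni`) is worth spelling out: misspelling the member creates a brand-new, unused field instead of an override, so the base scanner keeps its default and the `settings.nouescape` value was, apparently, silently ignored. A standalone sketch of the bug shape (toy classes, not the nsc scanner):

```scala
abstract class ScannerBase {
  val decodeUni: Boolean = true          // default: process unicode escapes
  def decodes = decodeUni
}
class Buggy extends ScannerBase { val decodeUnit = false }          // new field, never read
class Fixed extends ScannerBase { override val decodeUni = false }  // real override

assert((new Buggy).decodes)    // the setting is silently ignored
assert(!(new Fixed).decodes)   // the override takes effect
```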
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index 57b705094c..049c8aa2ff 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -10,8 +10,8 @@ package ast.parser
import collection.mutable.Map
import xml.{ EntityRef, Text }
import xml.XML.{ xmlns }
-import util.Position
import symtab.Flags.MUTABLE
+import scala.tools.util.StringOps.splitWhere
/** This class builds instance of <code>Tree</code> that represent XML.
*
@@ -161,9 +161,9 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean)
}
/** Returns (Some(prefix) | None, rest) based on position of ':' */
- def splitPrefix(name: String): (Option[String], String) = (name indexOf ':') match {
- case -1 => (None, name)
- case i => (Some(name take i), name drop (i + 1))
+ def splitPrefix(name: String): (Option[String], String) = splitWhere(name, _ == ':', true) match {
+ case Some((pre, rest)) => (Some(pre), rest)
+ case _ => (None, name)
}
/** Various node constructions. */
@@ -191,7 +191,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean)
/** Extract all the namespaces from the attribute map. */
val namespaces: List[Tree] =
- for (z <- attrMap.keysIterator.toList ; if z startsWith xmlns) yield {
+ for (z <- attrMap.keys.toList ; if z startsWith xmlns) yield {
val ns = splitPrefix(z) match {
case (Some(_), rest) => rest
case _ => null
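`splitPrefix` now delegates to `StringOps.splitWhere`, but its contract is unchanged and is easiest to read off the hand-rolled version the diff removes (reproduced here as a self-contained sketch):

```scala
def splitPrefix(name: String): (Option[String], String) =
  name.indexOf(':') match {
    case -1 => (None, name)
    case i  => (Some(name take i), name drop (i + 1))
  }

assert(splitPrefix("xmlns:foo") == (Some("xmlns"), "foo"))
assert(splitPrefix("div")       == (None, "div"))
```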
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
index 9aed4bd767..2146fa0fe6 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
@@ -7,7 +7,14 @@
package scala.tools.nsc
package ast.parser
-object Tokens {
+import annotation.switch
+
+/** Common code between JavaTokens and Tokens. Not as much (and not as concrete)
+ * as one might like because JavaTokens for no clear reason chose new numbers for
+ * identical token sets.
+ */
+abstract class Tokens {
+ import util.Chars._
/** special tokens */
final val EMPTY = -3
@@ -22,6 +29,23 @@ object Tokens {
final val FLOATLIT = 4
final val DOUBLELIT = 5
final val STRINGLIT = 6
+
+ def LPAREN: Int
+ def RBRACE: Int
+
+ def isIdentifier(code: Int): Boolean
+ def isLiteral(code: Int): Boolean
+ def isKeyword(code: Int): Boolean
+ def isSymbol(code: Int): Boolean
+
+ final def isSpace(at: Char) = at == ' ' || at == '\t'
+ final def isNewLine(at: Char) = at == CR || at == LF || at == FF
+ final def isBrace(code : Int) = code >= LPAREN && code <= RBRACE
+ final def isOpenBrace(code : Int) = isBrace(code) && (code % 2 == 0)
+ final def isCloseBrace(code : Int) = isBrace(code) && (code % 2 == 1)
+}
+
+object Tokens extends Tokens {
final val SYMBOLLIT = 7
def isLiteral(code : Int) =
code >= CHARLIT && code <= SYMBOLLIT
@@ -32,16 +56,14 @@ object Tokens {
def isIdentifier(code : Int) =
code >= IDENTIFIER && code <= BACKQUOTED_IDENT
- def canBeginExpression(code : Int) = code match {
- case IDENTIFIER|BACKQUOTED_IDENT|USCORE => true
- case LBRACE|LPAREN|LBRACKET|COMMENT|STRINGLIT => true
- case IF|DO|WHILE|FOR|NEW|TRY|THROW => true
- case NULL|THIS|TRUE|FALSE => true
- case code if isLiteral(code) => true
- case _ => false
+ @switch def canBeginExpression(code : Int) = code match {
+ case IDENTIFIER|BACKQUOTED_IDENT|USCORE => true
+ case LBRACE|LPAREN|LBRACKET|COMMENT|STRINGLIT => true
+ case IF|DO|WHILE|FOR|NEW|TRY|THROW => true
+ case NULL|THIS|TRUE|FALSE => true
+ case code => isLiteral(code)
}
-
/** keywords */
final val IF = 20
final val FOR = 21
@@ -90,15 +112,14 @@ object Tokens {
def isKeyword(code : Int) =
code >= IF && code <= LAZY
- def isDefinition(code : Int) = code match {
- case CLASS|TRAIT|OBJECT => true
- case CASECLASS|CASEOBJECT => true
- case DEF|VAL|VAR => true
- case TYPE => true
- case _ => false
+ @switch def isDefinition(code : Int) = code match {
+ case CLASS|TRAIT|OBJECT => true
+ case CASECLASS|CASEOBJECT => true
+ case DEF|VAL|VAR => true
+ case TYPE => true
+ case _ => false
}
-
/** special symbols */
final val COMMA = 70
final val SEMI = 71
@@ -127,11 +148,6 @@ object Tokens {
final val LBRACE = 94
final val RBRACE = 95
- def isBrace(code : Int) =
- code >= LPAREN && code <= RBRACE
- def isOpenBrace(code : Int) = isBrace(code) && (code % 2 == 0)
- def isCloseBrace(code : Int) = isBrace(code) && (code % 2 == 1)
-
/** XML mode */
final val XMLSTART = 96
@@ -141,15 +157,4 @@ object Tokens {
final val WHITESPACE = 105
final val IGNORE = 106
final val ESCAPE = 109
-
- def isSpace(at : Char) = at match {
- case ' ' | '\t' => true
- case _ => false
- }
- import util.Chars._
-
- def isNewLine(at : Char) = at match {
- case CR | LF | FF => true
- case _ => false
- }
}
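The `isBrace`/`isOpenBrace`/`isCloseBrace` trio moved into the shared `Tokens` base relies on the bracket tokens being numbered consecutively with openers on even codes. `LBRACE = 94` and `RBRACE = 95` appear above; `LPAREN`/`RPAREN` and `LBRACKET`/`RBRACKET` are assumed here to occupy 90..93. A sketch of the parity trick in isolation:

```scala
val LPAREN = 90; val RPAREN   = 91   // assumed numbering
val LBRACKET = 92; val RBRACKET = 93 // assumed numbering
val LBRACE = 94; val RBRACE   = 95   // from the diff above

def isBrace(code: Int)      = code >= LPAREN && code <= RBRACE
def isOpenBrace(code: Int)  = isBrace(code) && code % 2 == 0
def isCloseBrace(code: Int) = isBrace(code) && code % 2 == 1

assert(isOpenBrace(LBRACE) && isCloseBrace(RBRACE))
assert(!isBrace(71))   // e.g. SEMI = 71, outside the bracket range
```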
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 6802757083..bea52b1153 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -9,7 +9,6 @@ package ast.parser
import symtab.Flags._
import scala.collection.mutable.ListBuffer
-import scala.tools.nsc.util.Position
/** Methods for building trees, used in the parser. All the trees
* returned by this class must be untyped.
@@ -65,46 +64,54 @@ abstract class TreeBuilder {
* The variables keep their positions; whereas the pattern is converted to be synthetic
* for all nodes that contain a variable position.
*/
- private object getvarTraverser extends Traverser {
+ class GetVarTraverser extends Traverser {
val buf = new ListBuffer[(Name, Tree, Position)]
- def init: Traverser = { buf.clear; this }
+
def namePos(tree: Tree, name: Name): Position =
- if (!tree.pos.isRange || name.toString.contains('$')) tree.pos.focus
+ if (!tree.pos.isRange || name.containsName(nme.DOLLARraw)) tree.pos.focus
else {
val start = tree.pos.start
val end = start + name.decode.length
r2p(start, start, end)
}
+
override def traverse(tree: Tree): Unit = {
+ def seenName(name: Name) = buf exists (_._1 == name)
+ def add(name: Name, t: Tree) = if (!seenName(name)) buf += ((name, t, namePos(tree, name)))
val bl = buf.length
+
tree match {
- case Bind(name, Typed(tree1, tpt)) =>
- if ((name != nme.WILDCARD) && (buf.iterator forall (name !=))) {
- buf += ((name, if (treeInfo.mayBeTypePat(tpt)) TypeTree() else tpt.duplicate, namePos(tree, name)))
- }
+ case Bind(nme.WILDCARD, _) =>
+ super.traverse(tree)
+
+ case Bind(name, Typed(tree1, tpt)) =>
+ val newTree = if (treeInfo.mayBeTypePat(tpt)) TypeTree() else tpt.duplicate
+ add(name, newTree)
traverse(tree1)
- case Bind(name, tree1) =>
- if ((name != nme.WILDCARD) && (buf.iterator forall (name !=))) {
- // can assume only name range as position, as otherwise might overlap
- // with binds embedded in pattern tree1
- buf += ((name, TypeTree(), namePos(tree, name)))
- //println("found var "+name+" at "+namePos.show) //DEBUG
- }
+
+ case Bind(name, tree1) =>
+ // can assume only name range as position, as otherwise might overlap
+ // with binds embedded in pattern tree1
+ add(name, TypeTree())
traverse(tree1)
+
case _ =>
super.traverse(tree)
}
- if (buf.length > bl) tree setPos tree.pos.makeTransparent
+ if (buf.length > bl)
+ tree setPos tree.pos.makeTransparent
+ }
+ def apply(tree: Tree) = {
+ traverse(tree)
+ buf.toList
}
}
/** Returns list of all pattern variables, possibly with their types,
* without duplicates
*/
- private def getVariables(tree: Tree): List[(Name, Tree, Position)] = {
- getvarTraverser.init.traverse(tree)
- getvarTraverser.buf.toList
- }
+ private def getVariables(tree: Tree): List[(Name, Tree, Position)] =
+ new GetVarTraverser apply tree
private def makeTuple(trees: List[Tree], isType: Boolean): Tree = {
val tupString = "Tuple" + trees.length
@@ -198,7 +205,7 @@ abstract class TreeBuilder {
}
}
- /** Create a tree represeting an assignment &lt;lhs = rhs&gt; */
+ /** Create a tree representing an assignment &lt;lhs = rhs&gt; */
def makeAssign(lhs: Tree, rhs: Tree): Tree = lhs match {
case Apply(fn, args) =>
Apply(atPos(fn.pos) { Select(fn, nme.update) }, args ::: List(rhs))
@@ -363,7 +370,13 @@ abstract class TreeBuilder {
/** The position of the closure that starts with generator at position `genpos`.
*/
- def closurePos(genpos: Position) = r2p(genpos.startOrPoint, genpos.point, body.pos.endOrPoint)
+ def closurePos(genpos: Position) = {
+ val end = body.pos match {
+ case NoPosition => genpos.point
+ case bodypos => bodypos.endOrPoint
+ }
+ r2p(genpos.startOrPoint, genpos.point, end)
+ }
// val result =
enums match {
@@ -452,6 +465,37 @@ abstract class TreeBuilder {
def makePatDef(pat: Tree, rhs: Tree): List[Tree] =
makePatDef(Modifiers(0), pat, rhs)
+ /** For debugging only. Desugar a match statement like so:
+ * val x = scrutinee
+ * x match {
+ * case case1 => ...
+ * case _ => x match {
+ * case case2 => ...
+ * case _ => x match ...
+ * }
+ * }
+ *
+ * This way there are never transitions between nontrivial casedefs.
+ * Of course many things break: exhaustiveness and unreachable checking
+ * do not work, no switches will be generated, etc.
+ */
+ def makeSequencedMatch(selector: Tree, cases: List[CaseDef]): Tree = {
+ require(cases.nonEmpty)
+
+ val selectorName = freshName()
+ val valdef = atPos(selector.pos)(ValDef(Modifiers(PRIVATE | LOCAL | SYNTHETIC), selectorName, TypeTree(), selector))
+ val nselector = Ident(selectorName)
+
+ def loop(cds: List[CaseDef]): Match = {
+ def mkNext = CaseDef(Ident(nme.WILDCARD), EmptyTree, loop(cds.tail))
+
+ if (cds.size == 1) Match(nselector, cds)
+ else Match(selector, List(cds.head, mkNext))
+ }
+
+ Block(List(valdef), loop(cases))
+ }
+
/** Create tree for pattern definition <mods val pat0 = rhs> */
def makePatDef(mods: Modifiers, pat: Tree, rhs: Tree): List[Tree] = matchVarPattern(pat) match {
case Some((name, tpt)) =>
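The `Apply` case of `makeAssign` shown in the TreeBuilder hunk above is the rule that turns an application on the left-hand side of `=` into an `update` call. At the source level this is the familiar user-visible behavior:

```scala
val m = scala.collection.mutable.Map.empty[String, Int]
m("k") = 1                 // parsed as an assignment, rewritten to m.update("k", 1)
assert(m("k") == 1)

val arr = Array(0, 0, 0)
arr(1) = 42                // likewise rewritten to arr.update(1, 42)
assert(arr(1) == 42)
```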
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
new file mode 100644
index 0000000000..c4365a82ac
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -0,0 +1,41 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package backend
+
+import io.AbstractFile
+import util.JavaClassPath
+import util.ClassPath.{ JavaContext, DefaultJavaContext }
+import scala.tools.util.PathResolver
+
+trait JavaPlatform extends Platform[AbstractFile] {
+ import global._
+ import definitions.{ BoxesRunTimeClass, getMember }
+
+ lazy val classPath = new PathResolver(settings).result
+ def rootLoader = new loaders.JavaPackageLoader(classPath)
+
+ private def depAnalysisPhase = if (settings.make.value != "all") List(dependencyAnalysis) else Nil
+ def platformPhases = List(
+ flatten, // get rid of inner classes
+ liftcode, // generate reified trees
+ genJVM // generate .class files
+ ) ::: depAnalysisPhase
+
+ lazy val externalEquals = getMember(BoxesRunTimeClass, nme.equals_)
+ def externalEqualsNumNum = getMember(BoxesRunTimeClass, "equalsNumNum")
+ def externalEqualsNumChar = getMember(BoxesRunTimeClass, "equalsNumChar")
+ def externalEqualsNumObject = getMember(BoxesRunTimeClass, "equalsNumObject")
+
+ def isMaybeBoxed(sym: Symbol): Boolean = {
+ import definitions._
+ (sym == ObjectClass) ||
+ (sym == SerializableClass) ||
+ (sym == ComparableClass) ||
+ (sym isNonBottomSubClass BoxedNumberClass) ||
+ (sym isNonBottomSubClass BoxedCharacterClass)
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala b/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala
new file mode 100644
index 0000000000..6df158c411
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala
@@ -0,0 +1,36 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package backend
+
+import ch.epfl.lamp.compiler.msil.{ Type => MSILType }
+import util.MsilClassPath
+import msil.GenMSIL
+
+trait MSILPlatform extends Platform[MSILType] {
+ import global._
+ import definitions.{ ComparatorClass, BoxedNumberClass, getMember, getClass }
+
+ if (settings.verbose.value)
+ inform("[AssemRefs = " + settings.assemrefs.value + "]")
+
+ // phaseName = "msil"
+ object genMSIL extends {
+ val global: MSILPlatform.this.global.type = MSILPlatform.this.global
+ val runsAfter = List[String]("dce")
+ val runsRightAfter = None
+ } with GenMSIL
+
+ lazy val classPath = MsilClassPath.fromSettings(settings)
+ def rootLoader = new loaders.NamespaceLoader(classPath)
+
+ def platformPhases = List(
+ genMSIL // generate .msil files
+ )
+
+ lazy val externalEquals = getMember(ComparatorClass.companionModule, nme.equals_)
+ def isMaybeBoxed(sym: Symbol) = sym isNonBottomSubClass BoxedNumberClass
+}
diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala
new file mode 100644
index 0000000000..90075687c6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/Platform.scala
@@ -0,0 +1,31 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package backend
+
+import util.ClassPath
+
+/** The platform dependent pieces of Global.
+ */
+trait Platform[T] {
+ val global: Global
+ import global._
+
+ /** The compiler classpath. */
+ def classPath: ClassPath[T]
+
+ /** The root symbol loader. */
+ def rootLoader: LazyType
+
+ /** Any platform-specific phases. */
+ def platformPhases: List[SubComponent]
+
+ /** Symbol for a method which compares two objects. */
+ def externalEquals: Symbol
+
+ /** The various ways a boxed primitive might materialize at runtime. */
+ def isMaybeBoxed(sym: Symbol): Boolean
+}
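The new `Platform[T]` trait factors the JVM/MSIL differences out of `Global`: each backend supplies its classpath, root loader, extra phases and equality helpers, and the rest of the compiler talks only to the abstract interface. A toy, self-contained sketch of the shape of that design (names and types invented; not the nsc API, and phase names taken from the two platforms above):

```scala
trait Platform {
  def name: String
  def platformPhases: List[String]   // extra phases this backend appends
  def externalEquals: String         // stand-in for the Symbol in the real trait
}

object JvmLike extends Platform {
  def name = "jvm"
  def platformPhases = List("flatten", "liftcode", "genJVM")
  def externalEquals = "BoxesRunTime.equals"
}

object MsilLike extends Platform {
  def name = "msil"
  def platformPhases = List("genMSIL")
  def externalEquals = "Comparator.equals"
}

def allPhases(base: List[String], p: Platform) = base ::: p.platformPhases
assert(allPhases(List("parser", "typer"), JvmLike).last == "genJVM")
```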
diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
index d9830f7462..b95e3335ba 100644
--- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
@@ -94,6 +94,7 @@ abstract class ScalaPrimitives {
final val AS = 81 // x.as[y]
final val ISERASED = 85 // x.is$erased[y]
final val ASERASED = 86 // x.as$erased[y]
+ final val HASH = 87 // x.##
// AnyRef operations
final val SYNCHRONIZED = 90 // x.synchronized(y)
@@ -215,6 +216,7 @@ abstract class ScalaPrimitives {
addPrimitive(Any_!=, NE)
addPrimitive(Any_isInstanceOf, IS)
addPrimitive(Any_asInstanceOf, AS)
+ addPrimitive(Any_##, HASH)
// java.lang.Object
addPrimitive(Object_eq, ID)
@@ -560,9 +562,9 @@ abstract class ScalaPrimitives {
def isPrimitive(sym: Symbol): Boolean = primitives contains sym
- /** Return the code for the givem symbol. */
+ /** Return the code for the given symbol. */
def getPrimitive(sym: Symbol): Int = {
- assert(isPrimitive(sym), "Unkown primitive " + sym)
+ assert(isPrimitive(sym), "Unknown primitive " + sym)
primitives(sym)
}
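The new `HASH` primitive backs `Any.##`, the hash operation that agrees with `==` across boxed numeric types, which plain `hashCode` does not; its code generation is added to GenICode further below as `genScalaHash`. REPL-style illustration:

```scala
val i = 1
val d = 1.0
assert(i == d)                    // numeric equality crosses the Int/Double divide
assert(i.## == d.##)              // ## is consistent with ==
assert(i.hashCode != d.hashCode)  // the boxed hashCodes are not
```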
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index b12faa1c72..e5afa84c82 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -189,7 +189,7 @@ trait BasicBlocks {
var i = 0
var changed = false
while (i < instrs.length && !changed) {
- if (instrs(i) == oldInstr) {
+ if (instrs(i) eq oldInstr) {
newInstr.setPos(oldInstr.pos)
instrs(i) = newInstr
changed = true
@@ -201,7 +201,7 @@ trait BasicBlocks {
}
/** Replaces <code>iold</code> with <code>is</code>. It does not update
- * the position field in the newly inserted instrucitons, so it behaves
+ * the position field in the newly inserted instructions, so it behaves
* differently than the one-instruction versions of this function.
*
* @param iold ..
@@ -335,7 +335,7 @@ trait BasicBlocks {
}
/** Emitting does not set touched to true. During code generation this is a hotspot and
- * setting the flag for each emit is a waste. Caching should happend only after a block
+ * setting the flag for each emit is a waste. Caching should happen only after a block
* is closed, which sets the DIRTYSUCCS flag.
*/
def emit(instr: Instruction, pos: Position) {
@@ -490,10 +490,10 @@ trait BasicBlocks {
ss ++ (ss flatMap findSucc)
}
- succs.flatMap(findSucc).removeDuplicates
+ succs.flatMap(findSucc).distinct
}
- /** Returns the precessors of this block. */
+ /** Returns the predecessors of this block. */
def predecessors: List[BasicBlock] = {
if (hasFlag(DIRTYPREDS)) {
resetFlag(DIRTYPREDS)
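The `==` to `eq` change in `replaceInstruction` above matters because distinct instruction occurrences can be structurally equal, and only the exact object passed in should be replaced. A standalone illustration of the distinction (`LoadLocal` is an invented stand-in for an ICode instruction):

```scala
case class LoadLocal(index: Int)

val first  = LoadLocal(0)
val second = LoadLocal(0)
assert(first == second)            // structurally equal...
assert(!(first eq second))         // ...but different occurrences in a block

// Matching by reference identifies exactly one slot, even with duplicates.
val block = Array[AnyRef](first, second)
assert(block.indexWhere(_ eq second) == 1)
```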
diff --git a/src/compiler/scala/tools/nsc/backend/icode/CheckerError.scala b/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala
index 1e6b8abafc..9d102eef28 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/CheckerError.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/CheckerException.scala
@@ -9,5 +9,5 @@ package scala.tools.nsc
package backend
package icode
-class CheckerError(s: String) extends Exception(s)
+class CheckerException(s: String) extends Exception(s)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Checkers.scala b/src/compiler/scala/tools/nsc/backend/icode/Checkers.scala
index a441f69b59..c34cefdc12 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Checkers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Checkers.scala
@@ -74,7 +74,7 @@ abstract class Checkers {
def checkICodes: Unit = {
if (settings.verbose.value)
println("[[consistency check at the beginning of phase " + globalPhase.name + "]]")
- classes.valuesIterator foreach check
+ classes.values foreach check
}
def check(cls: IClass) {
@@ -84,17 +84,17 @@ abstract class Checkers {
for (f1 <- cls.fields; f2 <- cls.fields if f1 ne f2)
if (f1.symbol.name == f2.symbol.name)
Checkers.this.global.error("Repetitive field name: " +
- f1.symbol.fullNameString);
+ f1.symbol.fullName);
for (m1 <- cls.methods; m2 <- cls.methods if m1 ne m2)
if (m1.symbol.name == m2.symbol.name &&
m1.symbol.tpe =:= m2.symbol.tpe)
Checkers.this.global.error("Repetitive method: " +
- m1.symbol.fullNameString);
+ m1.symbol.fullName);
clasz.methods.foreach(check)
}
- /** Apply the give funtion to each pair of the cartesian product of
+ /** Apply the given function to each pair of the cartesian product of
* l1 x l2.
*/
def pairwise[a](l1: List[a], l2: List[a])(f: (a, a) => Unit) =
@@ -151,7 +151,7 @@ abstract class Checkers {
else if (s2 eq emptyStack) s1
else {
if (s1.length != s2.length)
- throw new CheckerError("Incompatible stacks: " + s1 + " and " + s2 + " in " + method + " at entry to block: " + bl);
+ throw new CheckerException("Incompatible stacks: " + s1 + " and " + s2 + " in " + method + " at entry to block: " + bl);
new TypeStack((s1.types, s2.types).zipped map lub)
}
}
@@ -241,15 +241,15 @@ abstract class Checkers {
receiver match {
case REFERENCE(sym) =>
checkBool(sym.info.member(method.name) != NoSymbol,
- "Method " + method + " does not exist in " + sym.fullNameString);
+ "Method " + method + " does not exist in " + sym.fullName);
if (method hasFlag Flags.PRIVATE)
checkBool(method.owner == clasz.symbol,
- "Cannot call private method of " + method.owner.fullNameString
- + " from " + clasz.symbol.fullNameString);
+ "Cannot call private method of " + method.owner.fullName
+ + " from " + clasz.symbol.fullName);
else if (method hasFlag Flags.PROTECTED)
checkBool(clasz.symbol isSubClass method.owner,
- "Cannot call protected method of " + method.owner.fullNameString
- + " from " + clasz.symbol.fullNameString);
+ "Cannot call protected method of " + method.owner.fullName
+ + " from " + clasz.symbol.fullName);
case ARRAY(_) =>
checkBool(receiver.toType.member(method.name) != NoSymbol,
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 7f351293c5..b1d1849c71 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -9,9 +9,9 @@ package scala.tools.nsc
package backend
package icode
-import scala.collection.mutable.{Map, HashMap, ListBuffer, Buffer, HashSet}
+import scala.collection.{ mutable, immutable }
+import scala.collection.mutable.{ HashMap, ListBuffer, Buffer, HashSet }
import scala.tools.nsc.symtab._
-import scala.tools.nsc.util.Position
import scala.annotation.switch
import PartialFunction._
@@ -27,14 +27,16 @@ abstract class GenICode extends SubComponent {
import icodes._
import icodes.opcodes._
import definitions.{
- ArrayClass, ObjectClass, ThrowableClass,
- Object_equals, Object_isInstanceOf, Object_asInstanceOf,
- isMaybeBoxed
+ ArrayClass, ObjectClass, ThrowableClass, StringClass, NothingClass, NullClass,
+ Object_equals, Object_isInstanceOf, Object_asInstanceOf, ScalaRunTimeModule,
+ BoxedNumberClass, BoxedCharacterClass,
+ getMember
}
import scalaPrimitives.{
isArrayOp, isComparisonOp, isLogicalOp,
isUniversalEqualityOp, isReferenceEqualityOp
}
+ import platform.isMaybeBoxed
val phaseName = "icode"
@@ -49,23 +51,16 @@ abstract class GenICode extends SubComponent {
var unit: CompilationUnit = _
- // We assume definitions are alread initialized
- val STRING = REFERENCE(definitions.StringClass)
+ // We assume definitions are already initialized
+ val STRING = REFERENCE(StringClass)
// this depends on the backend! should be changed.
val ANY_REF_CLASS = REFERENCE(ObjectClass)
- val SCALA_ALL = REFERENCE(definitions.NothingClass)
- val SCALA_ALLREF = REFERENCE(definitions.NullClass)
+ val SCALA_ALL = REFERENCE(NothingClass)
+ val SCALA_ALLREF = REFERENCE(NullClass)
val THROWABLE = REFERENCE(ThrowableClass)
- lazy val BoxesRunTime_equals =
- if (!forMSIL)
- definitions.getMember(definitions.BoxesRunTimeClass, nme.equals_)
- else
- definitions.getMember(definitions.getClass("scala.runtime.Comparator").linkedModuleOfClass, nme.equals_)
-
-
override def run {
scalaPrimitives.init
classes.clear
@@ -97,13 +92,15 @@ abstract class GenICode extends SubComponent {
gen(stats, ctx setPackage pid.name)
case ClassDef(mods, name, _, impl) =>
- log("Generating class: " + tree.symbol.fullNameString)
+ log("Generating class: " + tree.symbol.fullName)
val outerClass = ctx.clazz
ctx setClass (new IClass(tree.symbol) setCompilationUnit unit)
addClassFields(ctx, tree.symbol);
classes += (tree.symbol -> ctx.clazz)
unit.icode += ctx.clazz
gen(impl, ctx)
+ ctx.clazz.methods = ctx.clazz.methods.reverse // preserve textual order
+ ctx.clazz.fields = ctx.clazz.fields.reverse // preserve textual order
ctx setClass outerClass
// !! modules should be eliminated by refcheck... or not?
@@ -215,7 +212,7 @@ abstract class GenICode extends SubComponent {
case scalaPrimitives.NOT =>
ctx1.bb.emit(CALL_PRIMITIVE(Arithmetic(NOT, resKind)), larg.pos)
case _ =>
- abort("Unknown unary operation: " + fun.symbol.fullNameString +
+ abort("Unknown unary operation: " + fun.symbol.fullName +
" code: " + code)
}
@@ -301,6 +298,15 @@ abstract class GenICode extends SubComponent {
private def genSynchronized(tree: Apply, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
val Apply(fun, args) = tree
val monitor = ctx.makeLocal(tree.pos, ObjectClass.tpe, "monitor")
+ var monitorResult: Local = null
+
+ // if the synchronized block returns a result, store it in a local variable. just leaving
+ // it on the stack is not valid in MSIL (stack is cleaned when leaving try-blocks)
+ val argTpe = args.head.tpe
+ val hasResult = expectedType != UNIT
+ if (hasResult)
+ monitorResult = ctx.makeLocal(tree.pos, argTpe, "monitorResult")
+
var ctx1 = genLoadQualifier(fun, ctx)
ctx1.bb.emit(Seq(
DUP(ANY_REF_CLASS),
@@ -313,6 +319,8 @@ abstract class GenICode extends SubComponent {
ctx1 = ctx1.Try(
bodyCtx => {
val ctx2 = genLoad(args.head, bodyCtx, expectedType /* toTypeKind(tree.tpe.resultType) */)
+ if (hasResult)
+ ctx2.bb.emit(STORE_LOCAL(monitorResult))
ctx2.bb.emit(Seq(
LOAD_LOCAL(monitor),
MONITOR_EXIT() setPos tree.pos
@@ -332,7 +340,8 @@ abstract class GenICode extends SubComponent {
debugLog("synchronized block end with block %s closed=%s".format(ctx1.bb, ctx1.bb.closed))
ctx1.exitSynchronized(monitor)
-
+ if (hasResult)
+ ctx1.bb.emit(LOAD_LOCAL(monitorResult))
(ctx1, expectedType)
}
@@ -418,6 +427,8 @@ abstract class GenICode extends SubComponent {
genArithmeticOp(tree, ctx, code)
else if (code == scalaPrimitives.CONCAT)
(genStringConcat(tree, ctx), STRING)
+ else if (code == scalaPrimitives.HASH)
+ (genScalaHash(receiver, ctx), INT)
else if (isArrayOp(code))
genArrayOp(tree, ctx, code, expectedType)
else if (isLogicalOp(code) || isComparisonOp(code)) {
@@ -442,7 +453,7 @@ abstract class GenICode extends SubComponent {
genCoercion(tree, ctx1, code)
(ctx1, scalaPrimitives.generatedKind(code))
}
- else abort("Primitive operation not handled yet: " + sym.fullNameString + "(" +
+ else abort("Primitive operation not handled yet: " + sym.fullName + "(" +
fun.symbol.simpleName + ") " + " at: " + (tree.pos))
}
@@ -562,7 +573,7 @@ abstract class GenICode extends SubComponent {
val cast = sym match {
case Object_isInstanceOf => false
case Object_asInstanceOf => true
- case _ => abort("Unexpected type application " + fun + "[sym: " + sym.fullNameString + "]" + " in: " + tree)
+ case _ => abort("Unexpected type application " + fun + "[sym: " + sym.fullName + "]" + " in: " + tree)
}
val Select(obj, _) = fun
@@ -646,7 +657,7 @@ abstract class GenICode extends SubComponent {
case rt @ REFERENCE(cls) =>
if (settings.debug.value)
assert(ctor.owner == cls,
- "Symbol " + ctor.owner.fullNameString + " is different than " + tpt)
+ "Symbol " + ctor.owner.fullName + " is different than " + tpt)
val nw = NEW(rt)
ctx.bb.emit(nw, tree.pos)
ctx.bb.emit(DUP(generatedType))
@@ -663,7 +674,7 @@ abstract class GenICode extends SubComponent {
case Apply(fun @ _, List(expr)) if (definitions.isBox(fun.symbol)) =>
if (settings.debug.value)
- log("BOX : " + fun.symbol.fullNameString);
+ log("BOX : " + fun.symbol.fullName);
val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe))
val nativeKind = toTypeKind(expr.tpe)
if (settings.Xdce.value) {
@@ -680,7 +691,7 @@ abstract class GenICode extends SubComponent {
case Apply(fun @ _, List(expr)) if (definitions.isUnbox(fun.symbol)) =>
if (settings.debug.value)
- log("UNBOX : " + fun.symbol.fullNameString)
+ log("UNBOX : " + fun.symbol.fullName)
val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe))
val boxType = toTypeKind(fun.symbol.owner.linkedClassOfClass.tpe)
generatedType = boxType
@@ -742,8 +753,8 @@ abstract class GenICode extends SubComponent {
else cm setHostClass qualSym
if (settings.debug.value) log(
- if (qualSym == ArrayClass) "Stored target type kind " + toTypeKind(qual.tpe) + " for " + sym.fullNameString
- else "Set more precise host class for " + sym.fullNameString + " host: " + qualSym
+ if (qualSym == ArrayClass) "Stored target type kind " + toTypeKind(qual.tpe) + " for " + sym.fullName
+ else "Set more precise host class for " + sym.fullName + " host: " + qualSym
)
case _ =>
}
@@ -829,8 +840,7 @@ abstract class GenICode extends SubComponent {
generatedType = l.kind
} catch {
case ex: MatchError =>
- throw new Error("symbol " + tree.symbol +
- " does not exist in " + ctx.method)
+ abort("symbol " + tree.symbol + " does not exist in " + ctx.method)
}
}
}
@@ -1162,6 +1172,19 @@ abstract class GenICode extends SubComponent {
ctx1
}
+ /** Generate the scala ## method.
+ */
+ def genScalaHash(tree: Tree, ctx: Context): Context = {
+ val hashMethod = {
+ ctx.bb.emit(LOAD_MODULE(ScalaRunTimeModule))
+ getMember(ScalaRunTimeModule, "hash")
+ }
+
+ val ctx1 = genLoad(tree, ctx, ANY_REF_CLASS)
+ ctx1.bb.emit(CALL_METHOD(hashMethod, Static(false)))
+ ctx1
+ }
+
/**
* Returns a list of trees that each should be concatenated, from
* left to right. It turns a chained call like "a".+("b").+("c") into
@@ -1342,16 +1365,32 @@ abstract class GenICode extends SubComponent {
* comparison might have a run-time type subtype of java.lang.Number or java.lang.Character.
   * When it is statically known that both sides are equal and subtypes of Number or Character,
* not using the rich equality is possible (their own equals method will do ok.)*/
- def mustUseAnyComparator: Boolean =
- isMaybeBoxed(l.tpe.typeSymbol) && isMaybeBoxed(r.tpe.typeSymbol)
+ def mustUseAnyComparator: Boolean = {
+ def areSameFinals = l.tpe.isFinalType && r.tpe.isFinalType && (l.tpe =:= r.tpe)
+ !areSameFinals && isMaybeBoxed(l.tpe.typeSymbol) && isMaybeBoxed(r.tpe.typeSymbol)
+ }
if (mustUseAnyComparator) {
// when -optimise is on we call the @inline-version of equals, found in ScalaRunTime
val equalsMethod =
- if (!settings.XO.value) BoxesRunTime_equals
+ if (!settings.XO.value) {
+ def default = platform.externalEquals
+ platform match {
+ case x: JavaPlatform =>
+ import x._
+ if (l.tpe <:< BoxedNumberClass.tpe) {
+ if (r.tpe <:< BoxedNumberClass.tpe) externalEqualsNumNum
+ else if (r.tpe <:< BoxedCharacterClass.tpe) externalEqualsNumChar
+ else externalEqualsNumObject
+ }
+ else default
+
+ case _ => default
+ }
+ }
else {
- ctx.bb.emit(LOAD_MODULE(definitions.ScalaRunTimeModule))
- definitions.getMember(definitions.ScalaRunTimeModule, nme.inlinedEquals)
+ ctx.bb.emit(LOAD_MODULE(ScalaRunTimeModule))
+ getMember(ScalaRunTimeModule, nme.inlinedEquals)
}
val ctx1 = genLoad(l, ctx, ANY_REF_CLASS)
@@ -1364,48 +1403,35 @@ abstract class GenICode extends SubComponent {
if (isNull(l))
// null == expr -> expr eq null
genLoad(r, ctx, ANY_REF_CLASS).bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ANY_REF_CLASS)
- else {
+ else if (isNull(r)) {
+ // expr == null -> expr eq null
+ genLoad(l, ctx, ANY_REF_CLASS).bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ANY_REF_CLASS)
+ } else {
val eqEqTempLocal = getTempLocal
var ctx1 = genLoad(l, ctx, ANY_REF_CLASS)
// dicey refactor section
lazy val nonNullCtx = ctx1.newBlock
- if (isNull(r)) {
- // expr == null -> if (l eq null) true else l.equals(null)
- ctx1.bb.emitOnly(
- DUP(ANY_REF_CLASS),
- STORE_LOCAL(eqEqTempLocal) setPos l.pos,
- CZJUMP(thenCtx.bb, nonNullCtx.bb, EQ, ANY_REF_CLASS)
- )
- nonNullCtx.bb.emitOnly(
- LOAD_LOCAL(eqEqTempLocal) setPos l.pos,
- CONSTANT(Constant(null)) setPos r.pos,
- CALL_METHOD(Object_equals, Dynamic),
- CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
- )
- }
- else {
- // l == r -> if (l eq null) r eq null else l.equals(r)
- ctx1 = genLoad(r, ctx1, ANY_REF_CLASS)
- val nullCtx = ctx1.newBlock
-
- ctx1.bb.emitOnly(
- STORE_LOCAL(eqEqTempLocal) setPos l.pos,
- DUP(ANY_REF_CLASS),
- CZJUMP(nullCtx.bb, nonNullCtx.bb, EQ, ANY_REF_CLASS)
- )
- nullCtx.bb.emitOnly(
- DROP(ANY_REF_CLASS) setPos l.pos, // type of AnyRef
- LOAD_LOCAL(eqEqTempLocal),
- CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ANY_REF_CLASS)
- )
- nonNullCtx.bb.emitOnly(
- LOAD_LOCAL(eqEqTempLocal) setPos l.pos,
- CALL_METHOD(Object_equals, Dynamic),
- CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
- )
- }
+ // l == r -> if (l eq null) r eq null else l.equals(r)
+ ctx1 = genLoad(r, ctx1, ANY_REF_CLASS)
+ val nullCtx = ctx1.newBlock
+
+ ctx1.bb.emitOnly(
+ STORE_LOCAL(eqEqTempLocal) setPos l.pos,
+ DUP(ANY_REF_CLASS),
+ CZJUMP(nullCtx.bb, nonNullCtx.bb, EQ, ANY_REF_CLASS)
+ )
+ nullCtx.bb.emitOnly(
+ DROP(ANY_REF_CLASS) setPos l.pos, // type of AnyRef
+ LOAD_LOCAL(eqEqTempLocal),
+ CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ANY_REF_CLASS)
+ )
+ nonNullCtx.bb.emitOnly(
+ LOAD_LOCAL(eqEqTempLocal) setPos l.pos,
+ CALL_METHOD(Object_equals, Dynamic),
+ CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
+ )
}
}
}
@@ -1419,7 +1445,7 @@ abstract class GenICode extends SubComponent {
assert(ctx.clazz.symbol eq cls,
"Classes are not the same: " + ctx.clazz.symbol + ", " + cls)
- /** Non-method term members are fields, except for moudle members. Module
+ /** Non-method term members are fields, except for module members. Module
* members can only happen on .NET (no flatten) for inner traits. There,
* a module symbol is generated (transformInfo in mixin) which is used
* as owner for the members of the implementation class (so that the
@@ -1477,10 +1503,10 @@ abstract class GenICode extends SubComponent {
def prune0(block: BasicBlock): Unit = {
val optCont = block.lastInstruction match {
- case JUMP(b) if (b != block) => Some(b);
+ case JUMP(b) if (b != block) => Some(b)
case _ => None
}
- if (block.size == 1 && optCont != None) {
+ if (block.size == 1 && optCont.isDefined) {
val Some(cont) = optCont;
val pred = block.predecessors;
log("Preds: " + pred + " of " + block + " (" + optCont + ")");
@@ -1583,7 +1609,7 @@ abstract class GenICode extends SubComponent {
* to delay it any more: they will be used at some point.
*/
class DuplicateLabels(boundLabels: collection.Set[Symbol]) extends Transformer {
- val labels: Map[Symbol, Symbol] = new HashMap
+ val labels: mutable.Map[Symbol, Symbol] = new HashMap
var method: Symbol = _
var ctx: Context = _
@@ -1631,9 +1657,11 @@ abstract class GenICode extends SubComponent {
abstract class Cleanup;
case class MonitorRelease(m: Local) extends Cleanup {
+ override def hashCode = m.hashCode
override def equals(other: Any) = m == other;
}
case class Finalizer(f: Tree) extends Cleanup {
+ override def hashCode = f.hashCode
override def equals(other: Any) = f == other;
}
@@ -1881,7 +1909,7 @@ abstract class GenICode extends SubComponent {
val kind = toTypeKind(tree.tpe)
val guardResult = kind != UNIT && mayCleanStack(finalizer)
// we need to save bound labels before any code generation is performed on
- // the current context (otherwise, any new lables in the finalizer that need to
+ // the current context (otherwise, any new labels in the finalizer that need to
// be duplicated would be incorrectly considered bound -- see #2850).
val boundLabels: collection.Set[Symbol] = Set.empty ++ labels.keySet
@@ -1954,12 +1982,16 @@ abstract class GenICode extends SubComponent {
*
* A
* try { .. } catch { .. } finally { .. }
- * blocks is de-sugared into
+ * block is de-sugared into
* try { try { ..} catch { .. } } finally { .. }
*
- * A `finally` block is represented exactly the same as an exception handler, but
- * with `NoSymbol` as the exception class. The covered blocks are all blocks of
+ * In ICode a `finally` block is represented exactly the same as an exception handler,
+ * but with `NoSymbol` as the exception class. The covered blocks are all blocks of
* the `try { .. } catch { .. }`.
+ *
+ * Also, TryMsil does not enter any Finalizers into the `cleanups', because the
+ * CLI takes care of running the finalizer when seeing a `leave' statement inside
+ * a try / catch.
*/
def TryMsil(body: Context => Context,
handlers: List[(Symbol, TypeKind, (Context => Context))],
@@ -1978,6 +2010,7 @@ abstract class GenICode extends SubComponent {
val ctx = finalizerCtx.enterHandler(exh)
if (settings.Xdce.value) ctx.bb.emit(LOAD_EXCEPTION())
val ctx1 = genLoad(finalizer, ctx, UNIT)
+ // need jump for the ICode to be valid. MSIL backend will emit `Endfinally` instead.
ctx1.bb.emit(JUMP(afterCtx.bb))
ctx1.bb.close
finalizerCtx.endHandler()
@@ -1988,6 +2021,7 @@ abstract class GenICode extends SubComponent {
var ctx1 = outerCtx.enterHandler(exh)
if (settings.Xdce.value) ctx1.bb.emit(LOAD_EXCEPTION())
ctx1 = handler._3(ctx1)
+ // msil backend will emit `Leave` to jump out of a handler
ctx1.bb.emit(JUMP(afterCtx.bb))
ctx1.bb.close
outerCtx.endHandler()
@@ -2000,6 +2034,7 @@ abstract class GenICode extends SubComponent {
outerCtx.bb.emit(JUMP(bodyCtx.bb))
outerCtx.bb.close
+ // msil backend will emit `Leave` to jump out of a try-block
finalCtx.bb.emit(JUMP(afterCtx.bb))
finalCtx.bb.close
@@ -2044,7 +2079,7 @@ abstract class GenICode extends SubComponent {
* jumps to the given basic block.
*/
def patch(code: Code) {
- def substMap: Map[Instruction, Instruction] = {
+ def substMap: mutable.Map[Instruction, Instruction] = {
val map = new HashMap[Instruction, Instruction]()
toPatch foreach (i => map += (i -> patch(i)))
@@ -2094,7 +2129,7 @@ abstract class GenICode extends SubComponent {
///////////////// Fake instructions //////////////////////////
/**
- * Pseudo jump: it takes a Label instead of a basick block.
+ * Pseudo jump: it takes a Label instead of a basic block.
* It is used temporarily during code generation. It is replaced
* by a real JUMP instruction when all labels are resolved.
*/
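The rewritten `genEqEqPrimitive` branch above implements, in ICode, the rule stated in its comment: `l == r -> if (l eq null) r eq null else l.equals(r)`. Written out as plain Scala for reference, this is the behavior the emitted blocks are meant to match (not the generated instructions themselves):

```scala
def refEquals(l: AnyRef, r: AnyRef): Boolean =
  if (l eq null) r eq null
  else l.equals(r)

assert(refEquals(null, null))
assert(!refEquals(null, "x"))
assert(!refEquals("x", null))
assert(refEquals(new String("ab"), "ab"))   // delegates to equals, not eq
```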
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
index d2c803b397..0810a64f5d 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
@@ -54,12 +54,22 @@ abstract class ICodes extends AnyRef
else
global.abort("Unknown linearizer: " + global.settings.Xlinearizer.value)
+ /** Have to be careful because dump calls around, possibly
+ * re-entering methods which initiated the dump (like foreach
+ * in BasicBlocks) which leads to the icode output olympics.
+ */
+ private var alreadyDumping = false
+
/** Print all classes and basic blocks. Used for debugging. */
+
def dump {
+ if (alreadyDumping) return
+ else alreadyDumping = true
+
val printer = new TextPrinter(new PrintWriter(Console.out, true),
new DumpLinearizer)
- classes.valuesIterator foreach printer.printClass
+ classes.values foreach printer.printClass
}
object liveness extends Liveness {
@@ -72,13 +82,6 @@ abstract class ICodes extends AnyRef
lazy val AnyRefReference: TypeKind = REFERENCE(global.definitions.ObjectClass)
- import global.settings
- if (settings.XO.value) {
- settings.inline.value = true
- settings.Xcloselim.value = true
- settings.Xdce.value = true
- }
-
object icodeReader extends ICodeReader {
lazy val global: ICodes.this.global.type = ICodes.this.global
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
index 22d7ce90b7..d090454129 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
@@ -238,7 +238,7 @@ trait Linearizers { self: ICodes =>
covered.size + (hs :\ 0)((h, s) => h.blocks.length + s)
}
- val tryBlocks = handlersByCovered.keysIterator.toList.sortWith(size(_) > size(_))
+ val tryBlocks = handlersByCovered.keys.toList sortBy size
var result = normalLinearizer.linearize(m)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index a5c740f9e5..82e99d50ac 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -108,7 +108,7 @@ trait Members { self: ICodes =>
this
}
- override def toString() = symbol.fullNameString
+ override def toString() = symbol.fullName
def lookupField(s: Symbol) = fields find (_.symbol == s)
def lookupMethod(s: Symbol) = methods find (_.symbol == s)
@@ -201,7 +201,7 @@ trait Members { self: ICodes =>
/* determines whether or not this method is the class static constructor. */
def isStaticCtor: Boolean = isStatic && symbol.rawname == nme.CONSTRUCTOR
- override def toString() = symbol.fullNameString
+ override def toString() = symbol.fullName
import opcodes._
def checkLocals: Unit = if (code ne null) {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index f7baab286b..c0255cda65 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -170,7 +170,7 @@ trait Opcodes { self: ICodes =>
case class LOAD_FIELD(field: Symbol, isStatic: Boolean) extends Instruction {
/** Returns a string representation of this instruction */
override def toString(): String =
- "LOAD_FIELD " + (if (isStatic) field.fullNameString else field.toString());
+ "LOAD_FIELD " + (if (isStatic) field.fullName else field.toString());
override def consumed = if (isStatic) 0 else 1
override def produced = 1
@@ -320,7 +320,7 @@ trait Opcodes { self: ICodes =>
case class CALL_METHOD(method: Symbol, style: InvokeStyle) extends Instruction {
/** Returns a string representation of this instruction */
override def toString(): String =
- "CALL_METHOD " + hostClass.fullNameString + method.fullNameString +" ("+style.toString()+")";
+ "CALL_METHOD " + hostClass.fullName + method.fullName +" ("+style.toString()+")";
var hostClass: Symbol = method.owner;
def setHostClass(cls: Symbol): this.type = { hostClass = cls; this }
@@ -355,7 +355,7 @@ trait Opcodes { self: ICodes =>
0
else 1
- /** object idenity is equality for CALL_METHODs. Needed for
+ /** object identity is equality for CALL_METHODs. Needed for
* being able to store such instructions into maps, when more
* than one CALL_METHOD to the same method might exist.
*/
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
index 8e06e6b5f9..4b53a5e2ae 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
@@ -138,7 +138,7 @@ trait TypeKinds { self: ICodes =>
(b.isReferenceType || b.isArrayType))
toTypeKind(lub0(a.toType, b.toType))
else
- throw new CheckerError("Incompatible types: " + a + " with " + b)
+ throw new CheckerException("Incompatible types: " + a + " with " + b)
}
}
@@ -261,7 +261,7 @@ trait TypeKinds { self: ICodes =>
"REFERENCE to NoSymbol not allowed!")
override def toString(): String =
- "REFERENCE(" + cls.fullNameString + ")"
+ "REFERENCE(" + cls.fullName + ")"
/**
* Approximate `lub'. The common type of two references is
@@ -306,7 +306,7 @@ trait TypeKinds { self: ICodes =>
// abort(toString() + " maxType " + other.toString());
// override def toString(): String =
-// "VALUE(" + cls.fullNameString + ")";
+// "VALUE(" + cls.fullName + ")";
// }
def ArrayN(elem: TypeKind, dims: Int): ARRAY = {
@@ -431,8 +431,12 @@ trait TypeKinds { self: ICodes =>
////////////////// Conversions //////////////////////////////
- /** Return the TypeKind of the given type */
- def toTypeKind(t: Type): TypeKind = t match {
+ /** Return the TypeKind of the given type
+ *
+ * Call to .normalize fixes #3003 (follow type aliases). Otherwise,
+ * arrayOrClassType below would return AnyRefReference.
+ */
+ def toTypeKind(t: Type): TypeKind = t.normalize match {
case ThisType(sym) =>
if (sym == ArrayClass)
AnyRefReference
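
A minimal, standalone illustration of why dealiasing matters here (hypothetical alias and toy types, not compiler code): without the normalize step, an aliased array type would fall through to the catch-all reference case.

    // Toy sketch: resolve aliases before classifying a type (names illustrative only).
    sealed trait Kind
    case object AnyRefKind extends Kind
    final case class ArrayKind(elem: String) extends Kind

    sealed trait Tpe { def normalize: Tpe = this }
    final case class ArrayTpe(elem: String) extends Tpe
    final case class AliasTpe(underlying: Tpe) extends Tpe {
      override def normalize: Tpe = underlying.normalize   // follow the alias
    }

    def toKind(t: Tpe): Kind = t.normalize match {
      case ArrayTpe(e) => ArrayKind(e)   // reachable only after dealiasing
      case _           => AnyRefKind
    }
    // toKind(AliasTpe(ArrayTpe("Byte"))) == ArrayKind("Byte"); without normalize it would be AnyRefKind
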
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
index 1e943529b2..6872f56c39 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
@@ -76,8 +76,7 @@ trait TypeStacks { self: ICodes =>
* types agree if one is a subtype of the other.
*/
def agreesWith(other: TypeStack): Boolean =
- (types.length == other.types.length) &&
- ((types, other.types).zipped forall ((t1, t2) => t1 <:< t2 || t2 <:< t1))
+ (types corresponds other.types)((t1, t2) => t1 <:< t2 || t2 <:< t1)
/* This method returns a String representation of the stack */
override def toString() = types.mkString("\n", "\n", "\n")
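
For reference, `corresponds` already returns false when the two sequences differ in length, so the explicit length test removed above is subsumed by it; a standalone check with illustrative values:

    object CorrespondsCheck extends App {
      val xs = List(1, 2, 3)
      println((xs corresponds List(1, 2, 3, 4))(_ == _)) // false: lengths differ, no separate size check needed
      println((xs corresponds List(2, 3, 4))(_ < _))     // true: 1 < 2, 2 < 3, 3 < 4
    }
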
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CompleteLattice.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CompleteLattice.scala
index c26030cdb6..95f4418759 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CompleteLattice.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CompleteLattice.scala
@@ -40,7 +40,7 @@ trait CompleteLattice {
def lub(xs: List[Elem], exceptional: Boolean): Elem = try {
if (xs == Nil) bottom else xs reduceLeft lub2(exceptional)
} catch {
- case e: LubError =>
+ case e: LubException =>
Console.println("Lub on blocks: " + xs)
throw e
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
index 8a65875fbf..1fbbc10c3d 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
@@ -58,18 +58,13 @@ abstract class CopyPropagation {
class State(val bindings: Bindings, var stack: List[Value]) {
override def equals(that: Any): Boolean =
- (this eq that.asInstanceOf[AnyRef]) ||
- that.isInstanceOf[State] && {
- val other = that.asInstanceOf[State]
-
+ (this eq that.asInstanceOf[AnyRef]) || (that match {
/* comparison with bottom is reference equality! */
- if ((other eq bottom) || (this eq bottom))
- (this eq other)
- else {
+ case other: State if (this ne bottom) && (other ne bottom) =>
(this.bindings == other.bindings) &&
- ((this.stack, other.stack).zipped forall (_ == _))
- }
- }
+ (this.stack corresponds other.stack)(_ == _) // @PP: corresponds
+ case _ => false
+ })
/* Return an alias for the given local. It returns the last
* local in the chain of aliased locals. Cycles are not allowed
@@ -189,14 +184,14 @@ abstract class CopyPropagation {
if (exceptional) exceptionHandlerStack
else {
// if (a.stack.length != b.stack.length)
-// throw new LubError(a, b, "Invalid stacks in states: ");
+// throw new LubException(a, b, "Invalid stacks in states: ");
(a.stack, b.stack).zipped map { (v1, v2) =>
if (v1 == v2) v1 else Unknown
}
}
/* if (a.stack.length != b.stack.length)
- throw new LubError(a, b, "Invalid stacks in states: ");
+ throw new LubException(a, b, "Invalid stacks in states: ");
val resStack = List.map2(a.stack, b.stack) { (v1, v2) =>
if (v1 == v2) v1 else Unknown
}
@@ -359,7 +354,7 @@ abstract class CopyPropagation {
if (onInstance) {
val obj = out.stack.drop(method.info.paramTypes.length).head
// if (method.isPrimaryConstructor) {
- if (method.isPrimaryConstructor/* && isClosureClass(method.owner)*/) {
+ if (method.isPrimaryConstructor) {
obj match {
case Record(_, bindings) =>
for (v <- out.stack.take(method.info.paramTypes.length + 1)
@@ -533,7 +528,7 @@ abstract class CopyPropagation {
final def invalidateRecords(state: copyLattice.State) {
def shouldRetain(sym: Symbol): Boolean = {
if (sym.hasFlag(symtab.Flags.MUTABLE))
- log("dropping binding for " + sym.fullNameString)
+ log("dropping binding for " + sym.fullName)
!sym.hasFlag(symtab.Flags.MUTABLE)
}
state.stack = state.stack map { v => v match {
@@ -580,7 +575,7 @@ abstract class CopyPropagation {
// this relies on having the same order in paramAccessors and
// the arguments on the stack. It should be the same!
for ((p, i) <- paramAccessors.zipWithIndex) {
-// assert(p.tpe == paramTypes(i), "In: " + ctor.fullNameString
+// assert(p.tpe == paramTypes(i), "In: " + ctor.fullName
// + " having acc: " + (paramAccessors map (_.tpe))+ " vs. params" + paramTypes
// + "\n\t failed at pos " + i + " with " + p.tpe + " == " + paramTypes(i))
if (p.tpe == paramTypes(i))
@@ -592,18 +587,6 @@ abstract class CopyPropagation {
bindings
}
- /** Is <code>cls</code> a closure class?
- *
- * @param cls ...
- * @return ...
- */
- final def isClosureClass(cls: Symbol): Boolean =
- cls.isFinal &&
- cls.tpe.parents.exists { t =>
- val TypeRef(_, sym, _) = t;
- definitions.FunctionClass exists sym.==
- }
-
/** Is symbol <code>m</code> a pure method?
*
* @param m ...
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/LubError.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/LubException.scala
index ae0991f60a..0474d12a8b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/LubError.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/LubException.scala
@@ -8,6 +8,6 @@
package scala.tools.nsc
package backend.icode.analysis
-class LubError(a: Any, b: Any, msg: String) extends Exception {
+class LubException(a: Any, b: Any, msg: String) extends Exception {
override def toString() = "Lub error: " + msg + a + b
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index 9cc0c93928..858512c9b1 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -52,7 +52,7 @@ abstract class TypeFlowAnalysis {
else if ((s1 eq exceptionHandlerStack) || (s2 eq exceptionHandlerStack)) Predef.error("merging with exhan stack")
else {
// if (s1.length != s2.length)
-// throw new CheckerError("Incompatible stacks: " + s1 + " and " + s2);
+// throw new CheckerException("Incompatible stacks: " + s1 + " and " + s2);
new TypeStack((s1.types, s2.types).zipped map icodes.lub)
}
}
@@ -180,7 +180,7 @@ abstract class TypeFlowAnalysis {
assert(visited.contains(b),
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited));
}
-// log("" + method.symbol.fullNameString + " [" + method.code.blocks.size + " blocks] "
+// log("" + method.symbol.fullName + " [" + method.code.blocks.size + " blocks] "
// + "\n\t" + iterations + " iterations: " + t + " ms."
// + "\n\tlubs: " + typeFlowLattice.lubs + " out of which " + icodes.lubs0 + " typer lubs")
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index 7496107798..40eb08adfd 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -8,17 +8,17 @@
package scala.tools.nsc
package backend.jvm
-import java.io.{ DataOutputStream, File, OutputStream }
import java.nio.ByteBuffer
import scala.collection.immutable.{Set, ListSet}
import scala.collection.mutable.{Map, HashMap, HashSet}
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.symtab._
-import scala.tools.nsc.util.{Position, NoPosition}
import scala.tools.nsc.symtab.classfile.ClassfileConstants._
import ch.epfl.lamp.fjbg._
+import java.io.{ByteArrayOutputStream, DataOutputStream, File, OutputStream}
+import reflect.generic.{PickleFormat, PickleBuffer}
/** This class ...
*
@@ -34,7 +34,7 @@ abstract class GenJVM extends SubComponent {
val phaseName = "jvm"
/** Create a new phase */
- override def newPhase(p: Phase) = new JvmPhase(p)
+ override def newPhase(p: Phase): Phase = new JvmPhase(p)
/** JVM code generation phase
*/
@@ -50,7 +50,7 @@ abstract class GenJVM extends SubComponent {
for ((sym, cls) <- icodes.classes ; if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym))
icodes.classes -= sym
- classes.valuesIterator foreach apply
+ classes.values foreach apply
}
override def apply(cls: IClass) {
@@ -74,10 +74,12 @@ abstract class GenJVM extends SubComponent {
class BytecodeGenerator {
import JAccessFlags._
+ def debugLevel = settings.debuginfo.indexOfChoice
+
val MIN_SWITCH_DENSITY = 0.7
val INNER_CLASSES_FLAGS =
(ACC_PUBLIC | ACC_PRIVATE | ACC_PROTECTED | ACC_STATIC | ACC_FINAL | ACC_INTERFACE | ACC_ABSTRACT)
- val StringBuilderClass = definitions.getClass2("scala.StringBuilder", "scala.collection.mutable.StringBuilder").fullNameString
+ val StringBuilderClass = definitions.getClass2("scala.StringBuilder", "scala.collection.mutable.StringBuilder").fullName
val BoxesRunTime = "scala.runtime.BoxesRunTime"
val StringBuilderType = new JObjectType(StringBuilderClass)
@@ -103,6 +105,16 @@ abstract class GenJVM extends SubComponent {
lazy val RemoteInterface = definitions.getClass("java.rmi.Remote")
lazy val RemoteException = definitions.getClass("java.rmi.RemoteException").tpe
+
+ val versionPickle = {
+ val vp = new PickleBuffer(new Array[Byte](16), -1, 0)
+ assert(vp.writeIndex == 0)
+ vp.writeNat(PickleFormat.MajorVersion)
+ vp.writeNat(PickleFormat.MinorVersion)
+ vp.writeNat(0)
+ vp
+ }
+
var clasz: IClass = _
var method: IMethod = _
var jclass: JClass = _
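
The three writeNat calls above store small naturals (major version, minor version, and a zero entry count). For orientation, a standalone sketch of a big-endian base-128 encoding of the kind PickleBuffer uses for naturals (my recollection of the format, illustrative values; not taken from this patch):

    // Sketch: every byte except the last has the high bit set.
    def natBytes(n: Long): List[Int] = {
      def digits(m: Long): List[Int] =
        if (m < 0x80) List(m.toInt)
        else digits(m >>> 7) :+ (m & 0x7f).toInt
      val ds = digits(n)
      ds.init.map(_ | 0x80) :+ ds.last
    }
    // natBytes(5)   == List(5)
    // natBytes(300) == List(0x82, 0x2c)   // 300 = 2 * 128 + 44
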
@@ -113,9 +125,9 @@ abstract class GenJVM extends SubComponent {
val fjbgContext = new FJBGContext(49, 0)
- val emitSource = settings.debuginfo.level >= 1
- val emitLines = settings.debuginfo.level >= 2
- val emitVars = settings.debuginfo.level >= 3
+ val emitSource = debugLevel >= 1
+ val emitLines = debugLevel >= 2
+ val emitVars = debugLevel >= 3
/** Write a class to disk, adding the Scala signature (pickled type information) and
* inner classes.
@@ -124,27 +136,7 @@ abstract class GenJVM extends SubComponent {
* @param sym The corresponding symbol, used for looking up pickled information
*/
def emitClass(jclass: JClass, sym: Symbol) {
- def addScalaAttr(sym: Symbol): Unit = currentRun.symData.get(sym) match {
- case Some(pickle) =>
- val scalaAttr = fjbgContext.JOtherAttribute(jclass,
- jclass,
- nme.ScalaSignatureATTR.toString,
- pickle.bytes,
- pickle.writeIndex)
- pickledBytes = pickledBytes + pickle.writeIndex
- jclass.addAttribute(scalaAttr)
- currentRun.symData -= sym
- currentRun.symData -= sym.linkedSym
- //System.out.println("Generated ScalaSig Attr for " + sym)//debug
- case _ =>
- val markerAttr = getMarkerAttr(jclass)
- jclass.addAttribute(markerAttr)
- log("Could not find pickle information for " + sym)
- }
- if (!(jclass.getName().endsWith("$") && sym.isModuleClass))
- addScalaAttr(if (isTopLevelModule(sym)) sym.sourceModule else sym);
addInnerClasses(jclass)
-
val outfile = getFile(sym, jclass, ".class")
val outstream = new DataOutputStream(outfile.bufferedOutput)
jclass.writeTo(outstream)
@@ -152,8 +144,39 @@ abstract class GenJVM extends SubComponent {
informProgress("wrote " + outfile)
}
- private def getMarkerAttr(jclass: JClass): JOtherAttribute =
- fjbgContext.JOtherAttribute(jclass, jclass, nme.ScalaATTR.toString, new Array[Byte](0), 0)
+ /** Returns the ScalaSignature annotation if it must be added to this class, none otherwise; furthermore, it adds to
+ * jclass the ScalaSig marker attribute (marking that a scala signature annotation is present) or the Scala marker
+ * attribute (marking that the signature for this class is in another file). The annotation that is returned by
+ * this method must be added to the class' annotations list when generating them.
+ * @param jclass The class file that is being readied.
+ * @param sym The symbol for which the signature has been entered in the symData map. This is different from the
+ * symbol that is being generated in the case of a mirror class.
+ * @return An option that is:
+ * - defined and contains an annotation info of the ScalaSignature type, instantiated with the
+ * pickle signature for sym (a ScalaSig marker attribute has been written);
+ * - undefined if the jclass/sym couple must not contain a signature (a Scala marker attribute has
+ * been written). */
+ def scalaSignatureAddingMarker(jclass: JClass, sym: Symbol): Option[AnnotationInfo] =
+ currentRun.symData.get(sym) match {
+ case Some(pickle) if !jclass.getName().endsWith("$") =>
+ val scalaAttr =
+ fjbgContext.JOtherAttribute(jclass, jclass, nme.ScalaSignatureATTR.toString,
+ versionPickle.bytes, versionPickle.writeIndex)
+ jclass.addAttribute(scalaAttr)
+ val scalaAnnot =
+ AnnotationInfo(definitions.ScalaSignatureAnnotation.tpe, Nil, List(
+ (nme.bytes, ScalaSigBytes(pickle.bytes.take(pickle.writeIndex)))
+ ))
+ pickledBytes = pickledBytes + pickle.writeIndex
+ currentRun.symData -= sym
+ currentRun.symData -= sym.companionSymbol
+ Some(scalaAnnot)
+ case _ =>
+ val markerAttr =
+ fjbgContext.JOtherAttribute(jclass, jclass, nme.ScalaATTR.toString, new Array[Byte](0), 0)
+ jclass.addAttribute(markerAttr)
+ None
+ }
var serialVUID: Option[Long] = None
var remoteClass: Boolean = false
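
The Option returned by scalaSignatureAddingMarker is meant to be appended to the class's annotation list, as the later hunks do with `c.symbol.annotations ++ ssa`; appending an Option to a List contributes zero or one elements. A standalone check (values illustrative, not from this patch):

    object OptionAppendCheck extends App {
      val anns = List("deprecated")                   // stand-in for existing annotations
      println(anns ++ Some("ScalaSignature"))         // List(deprecated, ScalaSignature)
      println(anns ++ (None: Option[String]))         // List(deprecated)
    }
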
@@ -184,7 +207,7 @@ abstract class GenJVM extends SubComponent {
case _ => ()
}
- parents = parents.removeDuplicates
+ parents = parents.distinct
if (parents.length > 1) {
ifaces = new Array[String](parents.length - 1)
@@ -197,8 +220,6 @@ abstract class GenJVM extends SubComponent {
javaName(parents(0).typeSymbol),
ifaces,
c.cunit.source.toString)
- if (jclass.getName.endsWith("$"))
- jclass.addAttribute(getMarkerAttr(jclass))
if (isStaticModule(c.symbol) || serialVUID != None || clasz.bootstrapClass.isDefined) {
if (isStaticModule(c.symbol))
@@ -206,11 +227,11 @@ abstract class GenJVM extends SubComponent {
addStaticInit(jclass, c.lookupStaticCtor)
if (isTopLevelModule(c.symbol)) {
- if (c.symbol.linkedClassOfModule == NoSymbol)
+ if (c.symbol.companionClass == NoSymbol)
dumpMirrorClass(c.symbol, c.cunit.source.toString);
else
log("No mirror class for module with linked class: " +
- c.symbol.fullNameString)
+ c.symbol.fullName)
}
}
else {
@@ -222,7 +243,7 @@ abstract class GenJVM extends SubComponent {
!(sym.name.toString contains '$') && (sym hasFlag Flags.MODULE) && !sym.isImplClass && !sym.isNestedClass
}
- val lmoc = c.symbol.linkedModuleOfClass
+ val lmoc = c.symbol.companionModule
// add static forwarders if there are no name conflicts; see bugs #363 and #1735
if (lmoc != NoSymbol && !c.symbol.hasFlag(Flags.INTERFACE)) {
if (isCandidateForForwarders(lmoc) && !settings.noForwarders.value) {
@@ -236,8 +257,9 @@ abstract class GenJVM extends SubComponent {
clasz.fields foreach genField
clasz.methods foreach genMethod
+ val ssa = scalaSignatureAddingMarker(jclass, c.symbol)
addGenericSignature(jclass, c.symbol, c.symbol.owner)
- addAnnotations(jclass, c.symbol.annotations)
+ addAnnotations(jclass, c.symbol.annotations ++ ssa)
emitClass(jclass, c.symbol)
if (c.symbol hasAnnotation BeanInfoAttr)
@@ -330,10 +352,10 @@ abstract class GenJVM extends SubComponent {
val buf: ByteBuffer = ByteBuffer.allocate(512)
var nattr = 0
- // put some radom value; the actual number is determined at the end
+ // put some random value; the actual number is determined at the end
buf.putShort(0xbaba.toShort)
- for (AnnotationInfo(tp, List(exc), _) <- excs.removeDuplicates if tp.typeSymbol == definitions.ThrowsClass) {
+ for (AnnotationInfo(tp, List(exc), _) <- excs.distinct if tp.typeSymbol == definitions.ThrowsClass) {
val Literal(const) = exc
buf.putShort(
cpool.addClass(
@@ -347,7 +369,7 @@ abstract class GenJVM extends SubComponent {
}
/** Whether an annotation should be emitted as a Java annotation
- * .initialize: if 'annnot' is read from pickle, atp might be un-initialized
+ * .initialize: if 'annot' is read from pickle, atp might be un-initialized
*/
private def shouldEmitAnnotation(annot: AnnotationInfo) =
(annot.atp.typeSymbol.initialize.hasFlag(Flags.JAVA) &&
@@ -394,6 +416,10 @@ abstract class GenJVM extends SubComponent {
buf.putShort(cpool.addUtf8(const.symbolValue.name.toString).toShort)
}
+ case ScalaSigBytes(bytes) =>
+ buf.put('s'.toByte)
+ buf.putShort(cpool.addUtf8(reflect.generic.ByteCodecs.encode(bytes)).toShort)
+
case ArrayAnnotArg(args) =>
buf.put('['.toByte)
buf.putShort(args.length.toShort)
@@ -437,7 +463,7 @@ abstract class GenJVM extends SubComponent {
&& !(sym.isMethod && sym.hasFlag(Flags.LIFTED))
&& !(sym.ownerChain exists (_.isImplClass))) { // @M don't generate java generics sigs for (members of) implementation classes, as they are monomorphic (TODO: ok?)
val memberTpe = atPhase(currentRun.erasurePhase)(owner.thisType.memberInfo(sym))
- // println("addGenericSignature sym: " + sym.fullNameString + " : " + memberTpe + " sym.info: " + sym.info)
+ // println("addGenericSignature sym: " + sym.fullName + " : " + memberTpe + " sym.info: " + sym.info)
// println("addGenericSignature: "+ (sym.ownerChain map (x => (x.name, x.isImplClass))))
erasure.javaSig(sym, memberTpe) match {
case Some(sig) =>
@@ -543,7 +569,7 @@ abstract class GenJVM extends SubComponent {
def genField(f: IField) {
if (settings.debug.value)
- log("Adding field: " + f.symbol.fullNameString);
+ log("Adding field: " + f.symbol.fullName);
var attributes = 0
f.symbol.annotations foreach { a => a match {
@@ -568,7 +594,7 @@ abstract class GenJVM extends SubComponent {
def genMethod(m: IMethod) {
if (m.isStaticCtor) return
- log("Generating method " + m.symbol.fullNameString)
+ log("Generating method " + m.symbol.fullName)
method = m
endPC.clear
computeLocalVarsIndex(m)
@@ -831,12 +857,17 @@ abstract class GenJVM extends SubComponent {
* for methods defined there - bug #1804 */
lazy val commonParents = {
val cps = module.info.baseClasses
- val mps = module.linkedClassOfModule.info.baseClasses
+ val mps = module.companionClass.info.baseClasses
cps.filter(mps contains)
}
- /* the setter doesn't show up in members so we inspect the name */
+ /* The setter doesn't show up in members so we inspect the name
+ * ... and clearly it helps to know how the name is encoded, see ticket #3004.
+ * This logic is grossly inadequate! Name mangling needs a devotee.
+ */
def conflictsInCommonParent(name: Name) =
- commonParents exists { cp => name startsWith (cp.name + "$") }
+ commonParents exists { cp =>
+ (name startsWith (cp.name + "$")) || (name containsName ("$" + cp.name + "$"))
+ }
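
A plain-String illustration of the two encoded-name shapes the check above catches (hypothetical names; the real code compares `Name`s via startsWith / containsName):

    object NameCheck extends App {
      val cpName = "Base"                              // hypothetical common parent
      val n1 = "Base$count_$eq"                        // setter-style name starting with "Base$"
      val n2 = "Outer$Base$count_$eq"                  // same pattern buried deeper
      println(n1 startsWith (cpName + "$"))            // true
      println(n2 contains ("$" + cpName + "$"))        // true
    }
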
/** Should method `m' get a forwarder in the mirror class? */
def shouldForward(m: Symbol): Boolean =
@@ -847,10 +878,10 @@ abstract class GenJVM extends SubComponent {
&& !m.isConstructor
&& !m.isStaticMember
&& !(m.owner == definitions.AnyClass)
- && !module.isSubClass(module.linkedClassOfModule)
+ && !module.isSubClass(module.companionClass)
&& !conflictsIn(definitions.ObjectClass, m.name)
&& !conflictsInCommonParent(m.name)
- && !conflictsIn(module.linkedClassOfModule, m.name)
+ && !conflictsIn(module.companionClass, m.name)
)
assert(module.isModuleClass)
@@ -877,6 +908,8 @@ abstract class GenJVM extends SubComponent {
JClass.NO_INTERFACES,
sourceFile)
addForwarders(mirrorClass, clasz)
+ val ssa = scalaSignatureAddingMarker(mirrorClass, clasz.companionSymbol)
+ addAnnotations(mirrorClass, clasz.annotations ++ ssa)
emitClass(mirrorClass, clasz)
}
@@ -1011,7 +1044,7 @@ abstract class GenJVM extends SubComponent {
varsInBlock.clear
for (instr <- b) {
- class CompilationError(msg: String) extends Error {
+ class CompilationException(msg: String) extends Exception(msg) {
override def toString: String = {
msg +
"\nCurrent method: " + method +
@@ -1021,7 +1054,7 @@ abstract class GenJVM extends SubComponent {
method.dump
}
}
- def assert(cond: Boolean, msg: String) = if (!cond) throw new CompilationError(msg);
+ def assert(cond: Boolean, msg: String) = if (!cond) throw new CompilationException(msg)
instr match {
case THIS(clasz) =>
@@ -1054,7 +1087,7 @@ abstract class GenJVM extends SubComponent {
case LOAD_MODULE(module) =>
// assert(module.isModule, "Expected module: " + module)
if (settings.debug.value)
- log("genearting LOAD_MODULE for: " + module + " flags: " +
+ log("generating LOAD_MODULE for: " + module + " flags: " +
Flags.flagsToString(module.flags));
if (clasz.symbol == module.moduleClass && jmethod.getName() != nme.readResolve.toString)
jcode.emitALOAD_0()
@@ -1137,11 +1170,11 @@ abstract class GenJVM extends SubComponent {
case BOX(kind) =>
val boxedType = definitions.boxedClass(kind.toType.typeSymbol)
val mtype = new JMethodType(javaType(boxedType), Array(javaType(kind)))
- jcode.emitINVOKESTATIC(BoxesRunTime, "boxTo" + boxedType.cleanNameString, mtype)
+ jcode.emitINVOKESTATIC(BoxesRunTime, "boxTo" + boxedType.decodedName, mtype)
case UNBOX(kind) =>
val mtype = new JMethodType(javaType(kind), Array(JObjectType.JAVA_LANG_OBJECT))
- jcode.emitINVOKESTATIC(BoxesRunTime, "unboxTo" + kind.toType.typeSymbol.cleanNameString, mtype)
+ jcode.emitINVOKESTATIC(BoxesRunTime, "unboxTo" + kind.toType.typeSymbol.decodedName, mtype)
case NEW(REFERENCE(cls)) =>
val className = javaName(cls)
@@ -1710,7 +1743,7 @@ abstract class GenJVM extends SubComponent {
}
(if (sym.isClass || (sym.isModule && !sym.isMethod))
- sym.fullNameString('/')
+ sym.fullName('/')
else
sym.simpleName.toString.trim()) + suffix
}
@@ -1770,7 +1803,7 @@ abstract class GenJVM extends SubComponent {
/** Calls to methods in 'sym' need invokeinterface? */
def needsInterfaceCall(sym: Symbol): Boolean = {
- log("checking for interface call: " + sym.fullNameString)
+ log("checking for interface call: " + sym.fullName)
// the following call to 'info' may cause certain symbols to fail loading because we're
// too late in the compilation chain (aliases to overloaded symbols will not be properly
// resolved, see scala.Range, method super$++ that fails in UnPickler at LazyTypeRefAndAlias.complete
@@ -1836,7 +1869,7 @@ abstract class GenJVM extends SubComponent {
def assert(cond: Boolean, msg: => String) = if (!cond) {
method.dump
- throw new Error(msg + "\nMethod: " + method)
+ abort(msg + "\nMethod: " + method)
}
def assert(cond: Boolean) { assert(cond, "Assertion failed.") }
diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
index a50841f348..285e09295d 100644
--- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
+++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
@@ -13,7 +13,6 @@ import java.nio.{ByteBuffer, ByteOrder}
import scala.collection.mutable.{Map, HashMap, HashSet, Stack, ListBuffer}
import scala.tools.nsc.symtab._
-import scala.tools.nsc.util.Position
import ch.epfl.lamp.compiler.msil.{Type => MsilType, _}
import ch.epfl.lamp.compiler.msil.emit._
@@ -43,15 +42,15 @@ abstract class GenMSIL extends SubComponent {
val codeGenerator = new BytecodeGenerator
//classes is ICodes.classes, a HashMap[Symbol, IClass]
- classes.valuesIterator foreach codeGenerator.findEntryPoint
+ classes.values foreach codeGenerator.findEntryPoint
codeGenerator.initAssembly
- classes.valuesIterator foreach codeGenerator.createTypeBuilder
- classes.valuesIterator foreach codeGenerator.createClassMembers
+ classes.values foreach codeGenerator.createTypeBuilder
+ classes.values foreach codeGenerator.createClassMembers
try {
- classes.valuesIterator foreach codeGenerator.genClass
+ classes.values foreach codeGenerator.genClass
} finally {
codeGenerator.writeAssembly
}
@@ -248,7 +247,7 @@ abstract class GenMSIL extends SubComponent {
assemblyName.Name = assemName
massembly = AssemblyBuilderFactory.DefineDynamicAssembly(assemblyName)
- moduleName = assemName + (if (entryPoint == null) ".dll" else ".exe")
+ moduleName = assemName // + (if (entryPoint == null) ".dll" else ".exe")
// filename here: .dll or .exe (in both parameters), second: give absolute-path
mmodule = massembly.DefineDynamicModule(moduleName,
new File(outDir, moduleName).getAbsolutePath())
@@ -293,7 +292,7 @@ abstract class GenMSIL extends SubComponent {
tBuilder.SetCustomAttribute(SYMTAB_ATTRIBUTE_CONSTRUCTOR, symtab)
currentRun.symData -= sym
- currentRun.symData -= sym.linkedSym
+ currentRun.symData -= sym.companionSymbol
case _ =>
addMarker()
@@ -326,7 +325,7 @@ abstract class GenMSIL extends SubComponent {
annType.CreateType() // else, GetConstructors can't be used
val constr: ConstructorInfo = annType.GetConstructors()(0)
// prevent a second call of CreateType, only needed because there's no
- // otehr way than GetConstructors()(0) to get the constructor, if there's
+ // other way than GetConstructors()(0) to get the constructor, if there's
// no constructor symbol available.
val args: Array[Byte] =
@@ -470,7 +469,7 @@ abstract class GenMSIL extends SubComponent {
}
private def createTypes() {
- for (sym <- classes.keysIterator) {
+ for (sym <- classes.keys) {
val iclass = classes(sym)
val tBuilder = types(sym).asInstanceOf[TypeBuilder]
@@ -514,11 +513,11 @@ abstract class GenMSIL extends SubComponent {
tBuilder.setPosition(line, iclass.cunit.source.file.name)
if (isTopLevelModule(sym)) {
- if (sym.linkedClassOfModule == NoSymbol)
+ if (sym.companionClass == NoSymbol)
dumpMirrorClass(sym)
else
log("No mirror class for module with linked class: " +
- sym.fullNameString)
+ sym.fullName)
}
addSymtabAttribute(sym, tBuilder)
@@ -601,10 +600,19 @@ abstract class GenMSIL extends SubComponent {
genBlocks(linearization)
+ // RETURN instructions inside exception blocks are replaced by Leave. The target of the
+ // Leave is a `Ret` outside any exception block (generated here).
+ if (handlerReturnMethod == m) {
+ mcode.MarkLabel(handlerReturnLabel)
+ if (handlerReturnKind != UNIT)
+ mcode.Emit(OpCodes.Ldloc, handlerReturnLocal)
+ mcode.Emit(OpCodes.Ret)
+ }
+
beginExBlock.clear()
beginCatchBlock.clear()
endExBlock.clear()
- omitJumpBlocks.clear()
+ endFinallyLabels.clear()
}
def genBlocks(blocks: List[BasicBlock], previous: BasicBlock = null) {
@@ -622,15 +630,46 @@ abstract class GenMSIL extends SubComponent {
val beginCatchBlock = new HashMap[BasicBlock, ExceptionHandler]()
val endExBlock = new HashMap[BasicBlock, List[ExceptionHandler]]()
- // at the end of a try or catch block, the jumps must not be emitted.
- // the automatically generated leave will do the job.
- val omitJumpBlocks: HashSet[BasicBlock] = new HashSet()
+ /** When emitting the code (genBlock), the stack of currently active try / catch
+ * handlers. When seeing a `RETURN' inside a try / catch, we need to
+ * - store the result in a local (if it's not UNIT)
+ * - emit `Leave handlerReturnLabel` instead of the Return
+ * - emit code at the end: load the local and return its value
+ */
+ var currentHandlers = new Stack[ExceptionHandler]
+ // The IMethod the Local/Label/Kind below belong to
+ var handlerReturnMethod: IMethod = _
+ // Stores the result when returning inside an exception block
+ var handlerReturnLocal: LocalBuilder = _
+ // Label for a return instruction outside any exception block
+ var handlerReturnLabel: Label = _
+ // The result kind.
+ var handlerReturnKind: TypeKind = _
+ def returnFromHandler(kind: TypeKind): (LocalBuilder, Label) = {
+ if (handlerReturnMethod != method) {
+ handlerReturnMethod = method
+ if (kind != UNIT) {
+ handlerReturnLocal = mcode.DeclareLocal(msilType(kind))
+ handlerReturnLocal.SetLocalSymInfo("$handlerReturn")
+ }
+ handlerReturnLabel = mcode.DefineLabel()
+ handlerReturnKind = kind
+ }
+ (handlerReturnLocal, handlerReturnLabel)
+ }
+
+ /** For try/catch nested inside a finally, we can't use `Leave OutsideFinally`, the
+ * Leave target has to be inside the finally (and it has to be the `endfinally` instruction).
+ * So for every finalizer, we have a label which marks the place of the `endfinally`,
+ * nested try/catch blocks will leave there.
+ */
+ val endFinallyLabels = new HashMap[ExceptionHandler, Label]()
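
A toy, standalone model of the rule described above, mirroring the RETURN(kind) lowering later in this file (type, local, and label names are illustrative, not compiler code):

    sealed trait Emitted
    case object Ret extends Emitted
    final case class StoreLocal(name: String) extends Emitted
    final case class Leave(label: String) extends Emitted

    // A return emitted while any handler is active becomes "store result, then Leave";
    // outside all handlers it stays a plain Ret.
    def lowerReturn(activeHandlers: List[String], isUnit: Boolean): List[Emitted] =
      if (activeHandlers.isEmpty) List(Ret)
      else if (isUnit) List(Leave("handlerReturnLabel"))
      else List(StoreLocal("$handlerReturn"), Leave("handlerReturnLabel"))

    // lowerReturn(Nil, isUnit = false)             == List(Ret)
    // lowerReturn(List("finally"), isUnit = false) == List(StoreLocal("$handlerReturn"), Leave("handlerReturnLabel"))
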
/** Computes which blocks are the beginning / end of a try or catch block */
private def computeExceptionMaps(blocks: List[BasicBlock], m: IMethod): List[BasicBlock] = {
val visitedBlocks = new HashSet[BasicBlock]()
- // handlers which have not been intruduced so far
+ // handlers which have not been introduced so far
var openHandlers = m.exh
@@ -661,8 +700,6 @@ abstract class GenMSIL extends SubComponent {
// tail is all following catch blocks. Example *2*: Stack(List(h3), List(h4, h5))
val currentCatchHandlers = new Stack[List[ExceptionHandler]]()
- var prev: BasicBlock = null
-
for (b <- blocks) {
// are we past the current catch blocks?
@@ -676,12 +713,12 @@ abstract class GenMSIL extends SubComponent {
"Bad linearization of basic blocks inside catch. Found block not part of the handler\n"+
b.fullString +"\nwhile in catch-part of\n"+ handler)
- omitJumpBlocks += prev
-
val rest = currentCatchHandlers.pop.tail
if (rest.isEmpty) {
+ // all catch blocks of that exception handler are covered
res = handler :: endHandlers()
} else {
+ // there are more catch blocks for that try (handlers covering the same)
currentCatchHandlers.push(rest)
beginCatchBlock(b) = rest.head
}
@@ -708,7 +745,6 @@ abstract class GenMSIL extends SubComponent {
val handlers = currentTryHandlers.pop
currentCatchHandlers.push(handlers)
beginCatchBlock(b) = handler
- omitJumpBlocks += prev
}
}
@@ -719,7 +755,7 @@ abstract class GenMSIL extends SubComponent {
val newHandlersBySize = newHandlers.groupBy(_.covered.size)
// big handlers first, smaller ones are nested inside the try of the big one
// (checked by the assertions below)
- val sizes = newHandlersBySize.keysIterator.toList.sortWith(_ > _)
+ val sizes = newHandlersBySize.keys.toList.sortWith(_ > _)
val beginHandlers = new ListBuffer[ExceptionHandler]
for (s <- sizes) {
@@ -745,7 +781,6 @@ abstract class GenMSIL extends SubComponent {
}
beginExBlock(b) = beginHandlers.toList
visitedBlocks += b
- prev = b
}
// if there handlers left (i.e. handlers covering nothing, or a
@@ -769,7 +804,6 @@ abstract class GenMSIL extends SubComponent {
if (rest.isEmpty) {
liveBlocks
} else {
- omitJumpBlocks += prev
val b = m.code.newBlock
b.emit(Seq(
NEW(REFERENCE(definitions.ThrowableClass)),
@@ -796,20 +830,27 @@ abstract class GenMSIL extends SubComponent {
* - load arguments
* - NewObj(constructor) => reference on stack
*
- * This variable tells wether the previous instruction was a NEW,
+ * This variable tells whether the previous instruction was a NEW,
* we expect a DUP which is not emitted. */
var previousWasNEW = false
var lastLineNr: Int = 0
- mcode.MarkLabel(labels(block))
-
- if (settings.debug.value)
- log("Generating code for block: " + block)
+ // EndExceptionBlock must happen before MarkLabel because it adds the
+ // Leave instruction. Otherwise, labels(block) points to the Leave
+ // (inside the catch) instead of the instruction afterwards.
for (handlers <- endExBlock.get(block); exh <- handlers) {
+ currentHandlers.pop()
+ for (l <- endFinallyLabels.get(exh))
+ mcode.MarkLabel(l)
mcode.EndExceptionBlock()
}
+
+ mcode.MarkLabel(labels(block))
+ if (settings.debug.value)
+ log("Generating code for block: " + block)
+
for (handler <- beginCatchBlock.get(block)) {
if (handler.cls == NoSymbol) {
// `finally` blocks are represented the same as `catch`, but with no catch-type
@@ -820,6 +861,7 @@ abstract class GenMSIL extends SubComponent {
}
}
for (handlers <- beginExBlock.get(block); exh <- handlers) {
+ currentHandlers.push(exh)
mcode.BeginExceptionBlock()
}
@@ -875,9 +917,7 @@ abstract class GenMSIL extends SubComponent {
case FLOAT => mcode.Emit(OpCodes.Ldelem_R4)
case DOUBLE => mcode.Emit(OpCodes.Ldelem_R8)
case REFERENCE(cls) => mcode.Emit(OpCodes.Ldelem_Ref)
-
- // case ARRAY(elem) is not possible, for Array[Array[Int]], the
- // load will be case REFERENCE(java.lang.Object)
+ case ARRAY(elem) => mcode.Emit(OpCodes.Ldelem_Ref)
// case UNIT is not possible: an Array[Unit] will be an
// Array[scala.runtime.BoxedUnit] (-> case REFERENCE)
@@ -922,8 +962,9 @@ abstract class GenMSIL extends SubComponent {
case FLOAT => mcode.Emit(OpCodes.Stelem_R4)
case DOUBLE => mcode.Emit(OpCodes.Stelem_R8)
case REFERENCE(cls) => mcode.Emit(OpCodes.Stelem_Ref)
+ case ARRAY(elem) => mcode.Emit(OpCodes.Stelem_Ref) // @TODO: test this! (occurs when calling an Array[Object]* vararg param method)
- // case UNIT / ARRRAY are not possible (see comment at LOAD_ARRAY_ITEM)
+ // case UNIT not possible (see comment at LOAD_ARRAY_ITEM)
}
case STORE_LOCAL(local) =>
@@ -1131,7 +1172,7 @@ abstract class GenMSIL extends SubComponent {
// if the int on stack is 4, and 4 is in the second list => jump
// to second label
// branches is List[BasicBlock]
- // the labels to jump to (the last one ist the default one)
+ // the labels to jump to (the last one is the default one)
val switchLocal = mcode.DeclareLocal(MINT)
// several switch variables will appear with the same name in the
@@ -1150,12 +1191,18 @@ abstract class GenMSIL extends SubComponent {
i += 1
}
val defaultTarget = labels(branches(i))
- if (next != defaultTarget && !omitJumpBlocks.contains(block))
+ if (next != defaultTarget)
mcode.Emit(OpCodes.Br, defaultTarget)
-
case JUMP(whereto) =>
- if (next != whereto && !omitJumpBlocks.contains(block))
+ val (leaveHandler, leaveFinally, lfTarget) = leavesHandler(block, whereto)
+ if (leaveHandler) {
+ if (leaveFinally) {
+ if (lfTarget.isDefined) mcode.Emit(OpCodes.Leave, lfTarget.get)
+ else mcode.Emit(OpCodes.Endfinally)
+ } else
+ mcode.Emit(OpCodes.Leave, labels(whereto))
+ } else if (next != whereto)
mcode.Emit(OpCodes.Br, labels(whereto))
case CJUMP(success, failure, cond, kind) =>
@@ -1163,30 +1210,21 @@ abstract class GenMSIL extends SubComponent {
// values EQ, NE, LT, GE LE, GT
// kind is TypeKind
val isFloat = kind == FLOAT || kind == DOUBLE
- if (next == success || omitJumpBlocks.contains(block)) {
- emitBr(cond.negate, labels(failure), isFloat)
- } else {
- emitBr(cond, labels(success), isFloat)
- if (next != failure && !omitJumpBlocks.contains(block)) {
- mcode.Emit(OpCodes.Br, labels(failure))
- }
- }
+ val emit = (c: TestOp, l: Label) => emitBr(c, l, isFloat)
+ emitCondBr(block, cond, success, failure, next, emit)
case CZJUMP(success, failure, cond, kind) =>
- (kind: @unchecked) match {
- case BOOL | REFERENCE(_) =>
- if (next == success || omitJumpBlocks.contains(block)) {
- emitBrBool(cond.negate, labels(failure))
- } else {
- emitBrBool(cond, labels(success))
- if (next != failure && !omitJumpBlocks.contains(block)) {
- mcode.Emit(OpCodes.Br, labels(failure))
- }
- }
- }
+ emitCondBr(block, cond, success, failure, next, emitBrBool(_, _))
case RETURN(kind) =>
- mcode.Emit(OpCodes.Ret)
+ if (currentHandlers.isEmpty)
+ mcode.Emit(OpCodes.Ret)
+ else {
+ val (local, label) = returnFromHandler(kind)
+ if (kind != UNIT)
+ mcode.Emit(OpCodes.Stloc, local)
+ mcode.Emit(OpCodes.Leave, label)
+ }
case THROW() =>
mcode.Emit(OpCodes.Throw)
@@ -1335,8 +1373,85 @@ abstract class GenMSIL extends SubComponent {
code.Emit(OpCodes.Ldloc, localBuilders(local))
}
- ////////////////////// labels ///////////////////////
+ ////////////////////// branches ///////////////////////
+
+ /** Returns a triple (Boolean, Boolean, Option[Label])
+ * - whether the jump leaves some exception block (try / catch / finally)
+ * - whether it leaves a finally handler (the finally block, but not its try / catch)
+ * - a label where to jump for leaving the finally handler
+ * . None to leave directly using `endfinally`
+ * . Some(label) to emit `leave label` (for try / catch inside a finally handler)
+ */
+ def leavesHandler(from: BasicBlock, to: BasicBlock): (Boolean, Boolean, Option[Label]) =
+ if (currentHandlers.isEmpty) (false, false, None)
+ else {
+ val h = currentHandlers.head
+ val leaveHead = { h.covers(from) != h.covers(to) ||
+ h.blocks.contains(from) != h.blocks.contains(to) }
+ if (leaveHead) {
+ // we leave the innermost exception block.
+ // find out if we also leave some `finally` handler
+ currentHandlers.find(e => {
+ e.cls == NoSymbol && e.blocks.contains(from) != e.blocks.contains(to)
+ }) match {
+ case Some(finallyHandler) =>
+ if (h == finallyHandler) {
+ // the finally handler is the innermost, so we can emit `endfinally` directly
+ (true, true, None)
+ } else {
+ // we need to `Leave` to the `endfinally` of the next outer finally handler
+ val l = endFinallyLabels.getOrElseUpdate(finallyHandler, mcode.DefineLabel())
+ (true, true, Some(l))
+ }
+ case None =>
+ (true, false, None)
+ }
+ } else (false, false, None)
+ }
+
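
A simplified, runnable model of the decision made by leavesHandler, with blocks as Ints and handlers as sets of blocks; the Option[Label] part for nested finalizers is omitted (all names illustrative, not compiler code):

    final case class H(isFinally: Boolean, blocks: Set[Int])

    // Returns (leaves some handler, leaves a finally handler).
    def leaves(handlers: List[H], from: Int, to: Int): (Boolean, Boolean) = handlers match {
      case h :: _ if h.blocks(from) != h.blocks(to) =>
        (true, handlers.exists(e => e.isFinally && e.blocks(from) != e.blocks(to)))
      case _ => (false, false)
    }
    // leaves(List(H(isFinally = true, Set(1, 2))), from = 2, to = 5) == (true, true)
    // leaves(Nil, from = 1, to = 2)                                  == (false, false)
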
+ def emitCondBr(block: BasicBlock, cond: TestOp, success: BasicBlock, failure: BasicBlock,
+ next: BasicBlock, emitBrFun: (TestOp, Label) => Unit) {
+ val (sLeaveHandler, sLeaveFinally, slfTarget) = leavesHandler(block, success)
+ val (fLeaveHandler, fLeaveFinally, flfTarget) = leavesHandler(block, failure)
+
+ if (sLeaveHandler || fLeaveHandler) {
+ val sLabelOpt = if (sLeaveHandler) {
+ val leaveSLabel = mcode.DefineLabel()
+ emitBrFun(cond, leaveSLabel)
+ Some(leaveSLabel)
+ } else {
+ emitBrFun(cond, labels(success))
+ None
+ }
+ if (fLeaveHandler) {
+ if (fLeaveFinally) {
+ if (flfTarget.isDefined) mcode.Emit(OpCodes.Leave, flfTarget.get)
+ else mcode.Emit(OpCodes.Endfinally)
+ } else
+ mcode.Emit(OpCodes.Leave, labels(failure))
+ } else
+ mcode.Emit(OpCodes.Br, labels(failure))
+
+ sLabelOpt.map(l => {
+ mcode.MarkLabel(l)
+ if (sLeaveFinally) {
+ if (slfTarget.isDefined) mcode.Emit(OpCodes.Leave, slfTarget.get)
+ else mcode.Emit(OpCodes.Endfinally)
+ } else
+ mcode.Emit(OpCodes.Leave, labels(success))
+ })
+ } else {
+ if (next == success) {
+ emitBrFun(cond.negate, labels(failure))
+ } else {
+ emitBrFun(cond, labels(success))
+ if (next != failure) {
+ mcode.Emit(OpCodes.Br, labels(failure))
+ }
+ }
+ }
+ }
def emitBr(condition: TestOp, dest: Label, isFloat: Boolean) {
condition match {
@@ -1352,7 +1467,7 @@ abstract class GenMSIL extends SubComponent {
def emitBrBool(cond: TestOp, dest: Label) {
cond match {
// EQ -> Brfalse, NE -> Brtrue; this is because we come from
- // a CZJUMP. If the value on the stack is 0 (e.g. a boolen
+ // a CZJUMP. If the value on the stack is 0 (e.g. a boolean
// method returned false), and we are in the case EQ, then
// we need to emit Brfalse (EQ Zero means false). vice versa
case EQ => mcode.Emit(OpCodes.Brfalse, dest)
@@ -1415,9 +1530,10 @@ abstract class GenMSIL extends SubComponent {
else if (sym == definitions.NullClass)
return "scala.runtime.Null$"
- (if (sym.isClass || (sym.isModule && !sym.isMethod))
- sym.fullNameString
- else
+ (if (sym.isClass || (sym.isModule && !sym.isMethod)) {
+ if (sym.isNestedClass) sym.simpleName
+ else sym.fullName
+ } else
sym.simpleName.toString().trim()) + suffix
}
@@ -1483,7 +1599,7 @@ abstract class GenMSIL extends SubComponent {
if (sym.isStaticMember)
mf = mf | FieldAttributes.Static
- // TRANSIENT: "not nerialized", VOLATILE: doesn't exist on .net
+ // TRANSIENT: "not serialized", VOLATILE: doesn't exist on .net
// TODO: add this annotation also if the class has the custom attribute
// System.NotSerializedAttribute
sym.annotations.foreach( a => a match {
@@ -1547,7 +1663,14 @@ abstract class GenMSIL extends SubComponent {
case FLOAT => MFLOAT
case DOUBLE => MDOUBLE
case REFERENCE(cls) => getType(cls)
- case ARRAY(elem) => clrTypes.mkArrayType(msilType(elem))
+ case ARRAY(elem) =>
+ msilType(elem) match {
+ // For type builders, cannot call "clrTypes.mkArrayType" because this looks up
+ // the type "tp" in the assembly (not in the HashMap "types" of the backend).
+ // This can fail for nested types because the builders are not complete yet.
+ case tb: TypeBuilder => tb.MakeArrayType()
+ case tp: MsilType => clrTypes.mkArrayType(tp)
+ }
}
private def msilType(tpe: Type): MsilType = msilType(toTypeKind(tpe))
@@ -1561,13 +1684,13 @@ abstract class GenMSIL extends SubComponent {
case None =>
def typeString(sym: Symbol): String = {
val s = if (sym.isNestedClass) typeString(sym.owner) +"+"+ sym.simpleName
- else sym.fullNameString
+ else sym.fullName
if (sym.isModuleClass && !sym.isTrait) s + "$" else s
}
val name = typeString(sym)
val typ = clrTypes.getType(name)
if (typ == null)
- throw new Error(showsym(sym) + " with name " + name)
+ abort(showsym(sym) + " with name " + name)
else {
clrTypes.types(sym) = typ
typ
@@ -1593,7 +1716,7 @@ abstract class GenMSIL extends SubComponent {
def isInterface(s: Symbol) = s.isTrait && !s.isImplClass
val parents: List[Type] =
if (sym.info.parents.isEmpty) List(definitions.ObjectClass.tpe)
- else sym.info.parents.removeDuplicates
+ else sym.info.parents.distinct
val superType = if (isInterface(sym)) null else msilTypeFromSym(parents.head.typeSymbol)
if (settings.debug.value)
@@ -1639,7 +1762,7 @@ abstract class GenMSIL extends SubComponent {
for (ifield <- iclass.fields) {
val sym = ifield.symbol
if (settings.debug.value)
- log("Adding field: " + sym.fullNameString)
+ log("Adding field: " + sym.fullName)
var attributes = msilFieldFlags(sym)
val fBuilder = mtype.DefineField(msilName(sym), msilType(sym.tpe), attributes)
@@ -1652,7 +1775,7 @@ abstract class GenMSIL extends SubComponent {
val sym = m.symbol
if (settings.debug.value)
log("Creating MethodBuilder for " + Flags.flagsToString(sym.flags) + " " +
- sym.owner.fullNameString + "::" + sym.name)
+ sym.owner.fullName + "::" + sym.name)
val ownerType = getType(sym.enclClass).asInstanceOf[TypeBuilder]
assert(mtype == ownerType, "mtype = " + mtype + "; ownerType = " + ownerType)
@@ -1739,7 +1862,7 @@ abstract class GenMSIL extends SubComponent {
case Some(sym) => sym
case None =>
//val mclass = types(moduleClassSym)
- val mClass = clrTypes.getType(moduleClassSym.fullNameString + "$")
+ val mClass = clrTypes.getType(moduleClassSym.fullName + "$")
val mfield = mClass.GetField("MODULE$")
assert(mfield ne null, "module not found " + showsym(moduleClassSym))
fields(moduleClassSym) = mfield
@@ -1751,7 +1874,7 @@ abstract class GenMSIL extends SubComponent {
/** Adds a static initializer which creates an instance of the module
* class (calls the primary constructor). A special primary constructor
- * will be generated (notInitializedModules) which stores the new intance
+ * will be generated (notInitializedModules) which stores the new instance
* in the MODULE$ field right after the super call.
*/
private def addStaticInit(sym: Symbol) {
@@ -1923,7 +2046,7 @@ abstract class GenMSIL extends SubComponent {
if (constr eq null) {
System.out.println("Cannot find constructor " + sym.owner + "::" + sym.name)
System.out.println("scope = " + sym.owner.tpe.decls)
- throw new Error(sym.fullNameString)
+ abort(sym.fullName)
}
else {
mapConstructor(sym, constr)
@@ -1957,7 +2080,7 @@ abstract class GenMSIL extends SubComponent {
if (method eq null) {
System.out.println("Cannot find method " + sym.owner + "::" + msilName(sym))
System.out.println("scope = " + sym.owner.tpe.decls)
- throw new Error(sym.fullNameString)
+ abort(sym.fullName)
}
else {
mapMethod(sym, method)
@@ -2008,7 +2131,7 @@ abstract class GenMSIL extends SubComponent {
}
/*
- * add maping for member with name and paramTypes to member
+ * add mapping for member with name and paramTypes to member
* newName of newClass (same parameters)
*/
private def mapMethod(
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
index b05061b0f6..7311978147 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
@@ -9,7 +9,7 @@ package scala.tools.nsc
package backend.opt;
import scala.collection.mutable.{Map, HashMap};
-import scala.tools.nsc.backend.icode.analysis.LubError;
+import scala.tools.nsc.backend.icode.analysis.LubException;
import scala.tools.nsc.symtab._;
/**
@@ -37,7 +37,7 @@ abstract class ClosureElimination extends SubComponent {
}
/**
- * Remove references to the environemnt through fields of a closure object.
+ * Remove references to the environment through fields of a closure object.
* This has to be run after an 'apply' method has been inlined, but it still
* references the closure object.
*
@@ -181,7 +181,7 @@ abstract class ClosureElimination extends SubComponent {
}
}
}} catch {
- case e: LubError =>
+ case e: LubException =>
Console.println("In method: " + m)
Console.println(e)
e.printStackTrace
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index 3fdfc62b8e..29d0c37c84 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -10,7 +10,6 @@ package backend.opt
import scala.collection._
import scala.collection.immutable.{Map, HashMap, Set, HashSet}
-import scala.tools.nsc.backend.icode.analysis.LubError
import scala.tools.nsc.symtab._
/**
@@ -82,7 +81,7 @@ abstract class DeadCodeElimination extends SubComponent {
collectRDef(m)
mark
sweep(m)
- accessedLocals = accessedLocals.removeDuplicates
+ accessedLocals = accessedLocals.distinct
if (m.locals diff accessedLocals nonEmpty) {
log("Removed dead locals: " + (m.locals diff accessedLocals))
m.locals = accessedLocals.reverse
@@ -131,7 +130,7 @@ abstract class DeadCodeElimination extends SubComponent {
}
/** Mark useful instructions. Instructions in the worklist are each inspected and their
- * dependecies are marked useful too, and added to the worklist.
+ * dependencies are marked useful too, and added to the worklist.
*/
def mark {
// log("Starting with worklist: " + worklist)
@@ -218,7 +217,7 @@ abstract class DeadCodeElimination extends SubComponent {
private def computeCompensations(m: IMethod): mutable.Map[(BasicBlock, Int), List[Instruction]] = {
val compensations: mutable.Map[(BasicBlock, Int), List[Instruction]] = new mutable.HashMap
- for (bb <- m.code.blocks.toList) {
+ for (bb <- m.code.blocks) {
assert(bb.closed, "Open block in computeCompensations")
for ((i, idx) <- bb.toList.zipWithIndex) {
if (!useful(bb)(idx)) {
@@ -226,8 +225,20 @@ abstract class DeadCodeElimination extends SubComponent {
log("Finding definitions of: " + i + "\n\t" + consumedType + " at depth: " + depth)
val defs = rdef.findDefs(bb, idx, 1, depth)
for (d <- defs) {
- if (!compensations.isDefinedAt(d))
- compensations(d) = List(DROP(consumedType))
+ val (bb, idx) = d
+ bb(idx) match {
+ case DUP(_) if idx > 0 =>
+ bb(idx - 1) match {
+ case nw @ NEW(_) =>
+ val init = findInstruction(bb, nw.init)
+ log("Moving DROP to after <init> call: " + nw.init)
+ compensations(init) = List(DROP(consumedType))
+ case _ =>
+ compensations(d) = List(DROP(consumedType))
+ }
+ case _ =>
+ compensations(d) = List(DROP(consumedType))
+ }
}
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 016e70a968..cb87301b6a 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -104,7 +104,7 @@ abstract class Inliners extends SubComponent {
}
val instrAfter = block.toList.drop(instrBefore.length + 1);
- assert(!instrAfter.isEmpty, "CALL_METHOD cannot be the last instrcution in block!");
+ assert(!instrAfter.isEmpty, "CALL_METHOD cannot be the last instruction in block!");
// store the '$this' into the special local
val inlinedThis = new Local(caller.symbol.newVariable(instr.pos, freshName("$inlThis")), REFERENCE(definitions.ObjectClass), false);
@@ -328,10 +328,10 @@ abstract class Inliners extends SubComponent {
if (receiver != msym.owner && receiver != NoSymbol) {
if (settings.debug.value)
log("" + i + " has actual receiver: " + receiver);
- if (!concreteMethod.isFinal && receiver.isFinal) {
+ if (!concreteMethod.isEffectivelyFinal && receiver.isFinal) {
concreteMethod = lookupImpl(concreteMethod, receiver)
if (settings.debug.value)
- log("\tlooked up method: " + concreteMethod.fullNameString)
+ log("\tlooked up method: " + concreteMethod.fullName)
}
}
@@ -342,11 +342,11 @@ abstract class Inliners extends SubComponent {
log("Treating " + i
+ "\n\treceiver: " + receiver
+ "\n\ticodes.available: " + icodes.available(receiver)
- + "\n\tconcreteMethod.isFinal: " + concreteMethod.isFinal);
+ + "\n\tconcreteMethod.isEffectivelyFinal: " + concreteMethod.isEffectivelyFinal);
if ( icodes.available(receiver)
&& (isClosureClass(receiver)
- || concreteMethod.isFinal
+ || concreteMethod.isEffectivelyFinal
|| receiver.isFinal)) {
icodes.icode(receiver).get.lookupMethod(concreteMethod) match {
case Some(inc) =>
@@ -387,7 +387,7 @@ abstract class Inliners extends SubComponent {
}
info = tfa.interpret(info, i)
}}}
- if (tfa.stat) log(m.symbol.fullNameString + " iterations: " + tfa.iterations + " (size: " + m.code.blocks.length + ")")
+ if (tfa.stat) log(m.symbol.fullName + " iterations: " + tfa.iterations + " (size: " + m.code.blocks.length + ")")
}} while (retry && count < 15)
m.normalize
}
@@ -402,7 +402,7 @@ abstract class Inliners extends SubComponent {
/** Should the given method be loaded from disk? */
def shouldLoad(receiver: Symbol, method: Symbol): Boolean = {
if (settings.debug.value) log("shouldLoad: " + receiver + "." + method)
- ((method.isFinal && isMonadMethod(method) && isHigherOrderMethod(method))
+ ((method.isEffectivelyFinal && isMonadMethod(method) && isHigherOrderMethod(method))
|| (receiver.enclosingPackage == definitions.ScalaRunTimeModule.enclosingPackage)
|| (receiver == definitions.PredefModule.moduleClass)
|| (method.hasAnnotation(ScalaInlineAttr)))
@@ -488,7 +488,7 @@ abstract class Inliners extends SubComponent {
}
private def lookupImpl(meth: Symbol, clazz: Symbol): Symbol = {
- //println("\t\tlooking up " + meth + " in " + clazz.fullNameString + " meth.owner = " + meth.owner)
+ //println("\t\tlooking up " + meth + " in " + clazz.fullName + " meth.owner = " + meth.owner)
if (meth.owner == clazz
|| clazz == definitions.NullClass
|| clazz == definitions.NothingClass) meth
@@ -544,13 +544,10 @@ abstract class Inliners extends SubComponent {
}
} /* class Inliner */
- /** Is the given class a subtype of a function trait? */
+ /** Is the given class a closure? */
def isClosureClass(cls: Symbol): Boolean = {
- val res = cls.isFinal && cls.hasFlag(Flags.SYNTHETIC) && !cls.isModuleClass &&
- cls.tpe.parents.exists { t =>
- val TypeRef(_, sym, _) = t;
- definitions.FunctionClass exists sym.==
- }
+ val res = (cls.isFinal && cls.hasFlag(Flags.SYNTHETIC)
+ && !cls.isModuleClass && cls.isAnonymousFunction)
res
}
diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
index 43efd0726b..6aef637902 100644
--- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
@@ -18,6 +18,9 @@ abstract class Changes {
abstract class Change
+ private lazy val annotationsChecked =
+ List(definitions.getClass("scala.specialized")) // Any others that should be checked?
+
/** Are the new modifiers more restrictive than the old ones? */
private def moreRestrictive(from: Long, to: Long): Boolean =
((((to & PRIVATE) != 0L) && (from & PRIVATE) == 0L)
@@ -36,20 +39,26 @@ abstract class Changes {
case class Changed(e: Entity)(implicit val reason: String) extends Change {
override def toString = "Changed(" + e + ")[" + reason + "]"
}
+ case class ParentChanged(e: Entity) extends Change
+
+ private val changedTypeParams = new mutable.HashSet[String]
- private def sameSymbol(sym1: Symbol, sym2: Symbol): Boolean =
- sym1.fullNameString == sym2.fullNameString
+ private def sameSymbol(sym1: Symbol, sym2: Symbol, simple: Boolean = false): Boolean =
+ if (simple) sym1.encodedName == sym2.encodedName else sym1.fullName == sym2.fullName
private def sameFlags(sym1: Symbol, sym2: Symbol): Boolean =
sym1.flags == sym2.flags
+ private def sameAnnotations(sym1: Symbol, sym2: Symbol): Boolean =
+ annotationsChecked.forall(a =>
+ (sym1.hasAnnotation(a) == sym2.hasAnnotation(a)))
- private def sameType(tp1: Type, tp2: Type) = {
+ private def sameType(tp1: Type, tp2: Type)(implicit strict: Boolean) = {
def typeOf(tp: Type): String = tp.toString + "[" + tp.getClass + "]"
val res = sameType0(tp1, tp2)
//if (!res) println("\t different types: " + typeOf(tp1) + " : " + typeOf(tp2))
res
}
- private def sameType0(tp1: Type, tp2: Type): Boolean = ((tp1, tp2) match {
+ private def sameType0(tp1: Type, tp2: Type)(implicit strict: Boolean): Boolean = ((tp1, tp2) match {
/*case (ErrorType, _) => false
case (WildcardType, _) => false
case (_, ErrorType) => false
@@ -68,9 +77,18 @@ abstract class Changes {
case (ConstantType(value1), ConstantType(value2)) =>
value1 == value2
case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
- sameType(pre1, pre2) && sameSymbol(sym1, sym2) &&
- ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
- sameTypes(args1, args2))
+ val testSymbols =
+ if (!sameSymbol(sym1, sym2)) {
+ val v = (!strict && sym1.isType && sym2.isType && sameType(sym1.info, sym2.info))
+ if (v) changedTypeParams += sym1.fullName
+ v
+ } else
+ !sym1.isTypeParameter || !changedTypeParams.contains(sym1.fullName)
+
+ testSymbols && sameType(pre1, pre2) &&
+ (sym1.variance == sym2.variance) &&
+ ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
+ sameTypes(args1, args2))
// @M! normalize reduces higher-kinded case to PolyType's
case (RefinedType(parents1, ref1), RefinedType(parents2, ref2)) =>
@@ -87,31 +105,28 @@ abstract class Changes {
}
}
sameTypes(parents1, parents2) && isSubScope(ref1, ref2) && isSubScope(ref2, ref1)
-
- case (MethodType(params1, res1), MethodType(params2, res2)) =>
+ case (mt1 @ MethodType(params1, res1), mt2 @ MethodType(params2, res2)) =>
// new dependent types: probably fix this, use substSym as done for PolyType
- (sameTypes(tp1.paramTypes, tp2.paramTypes) &&
- ((tp1.params, tp2.params).zipped forall ((t1, t2) =>
- (sameSymbol(t1, t2) && sameFlags(t1, t2)))) &&
+ sameTypes(tp1.paramTypes, tp2.paramTypes) &&
+ (tp1.params corresponds tp2.params)((t1, t2) => sameSymbol(t1, t2, true) && sameFlags(t1, t2)) &&
sameType(res1, res2) &&
- tp1.isInstanceOf[ImplicitMethodType] == tp2.isInstanceOf[ImplicitMethodType])
-
+ mt1.isImplicit == mt2.isImplicit
case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
sameTypeParams(tparams1, tparams2) && sameType(res1, res2)
case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) =>
- sameTypeParams(tparams1, tparams2) && sameType(res1, res2)
+ sameTypeParams(tparams1, tparams2)(false) && sameType(res1, res2)(false)
case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) =>
sameType(lo1, lo2) && sameType(hi1, hi2)
case (BoundedWildcardType(bounds), _) =>
bounds containsType tp2
case (_, BoundedWildcardType(bounds)) =>
bounds containsType tp1
-
case (AnnotatedType(_,_,_), _) =>
- annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
+ annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) &&
+ sameType(tp1.withoutAnnotations, tp2.withoutAnnotations)
case (_, AnnotatedType(_,_,_)) =>
- annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
-
+ annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) &&
+ sameType(tp1.withoutAnnotations, tp2.withoutAnnotations)
case (_: SingletonType, _: SingletonType) =>
var origin1 = tp1
while (origin1.underlying.isInstanceOf[SingletonType]) {
@@ -132,25 +147,28 @@ abstract class Changes {
((tp1n ne tp1) || (tp2n ne tp2)) && sameType(tp1n, tp2n)
}
- private def sameTypeParams(tparams1: List[Symbol], tparams2: List[Symbol]) =
+ private def sameTypeParams(tparams1: List[Symbol], tparams2: List[Symbol])(implicit strict: Boolean) =
sameTypes(tparams1 map (_.info), tparams2 map (_.info)) &&
- sameTypes(tparams1 map (_.tpe), tparams2 map (_.tpe))
+ sameTypes(tparams1 map (_.tpe), tparams2 map (_.tpe)) &&
+ (tparams1 corresponds tparams2)((t1, t2) => sameAnnotations(t1, t2))
- def sameTypes(tps1: List[Type], tps2: List[Type]): Boolean =
- (tps1.length == tps2.length) && ((tps1, tps2).zipped forall sameType)
+ private def sameTypes(tps1: List[Type], tps2: List[Type])(implicit strict: Boolean) =
+ (tps1 corresponds tps2)(sameType(_, _))
/** Return the list of changes between 'from' and 'toSym.info'.
*/
def changeSet(from: Type, toSym: Symbol): List[Change] = {
implicit val defaultReason = "types"
+ implicit val defaultStrictTypeRefTest = true
val to = toSym.info
+ changedTypeParams.clear
def omitSymbols(s: Symbol): Boolean = !s.hasFlag(LOCAL | LIFTED | PRIVATE)
val cs = new mutable.ListBuffer[Change]
if ((from.parents zip to.parents) exists { case (t1, t2) => !sameType(t1, t2) })
cs += Changed(toEntity(toSym))(from.parents.zip(to.parents).toString)
- if (!sameTypeParams(from.typeParams, to.typeParams))
+ if (!sameTypeParams(from.typeParams, to.typeParams)(false))
cs += Changed(toEntity(toSym))(" tparams: " + from.typeParams.zip(to.typeParams))
// new members not yet visited
@@ -185,8 +203,9 @@ abstract class Changes {
}
def removeChangeSet(sym: Symbol): Change = Removed(toEntity(sym))
def changeChangeSet(sym: Symbol, msg: String): Change = Changed(toEntity(sym))(msg)
+ def parentChangeSet(sym: Symbol): Change = ParentChanged(toEntity(sym))
private def toEntity(sym: Symbol): Entity =
- if (sym.isClass) Class(sym.fullNameString)
- else Definition(sym.fullNameString)
+ if (sym.isClass) Class(sym.fullName)
+ else Definition(sym.fullName)
}
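
The change above threads a comparison mode through the mutually recursive `sameType`/`sameTypes`/`sameTypeParams` calls by adding an implicit `strict: Boolean` parameter: `changeSet` fixes a default via `implicit val defaultStrictTypeRefTest = true`, while individual cases (existential types, the relaxed TypeRef symbol test) pass `false` explicitly. A minimal standalone sketch of that pattern, with illustrative names that are not from the compiler:

    object ImplicitFlagSketch {
      // Two mutually recursive comparisons share one implicit Boolean "mode".
      private def sameShape(a: List[Int], b: List[Int])(implicit strict: Boolean): Boolean =
        a.length == b.length && sameElems(a, b)

      private def sameElems(a: List[Int], b: List[Int])(implicit strict: Boolean): Boolean =
        (a corresponds b) { (x, y) =>
          if (strict) x == y            // strict mode: exact match
          else (x - y).abs <= 1         // relaxed mode: tolerate small differences
        }

      def demo(): (Boolean, Boolean) = {
        implicit val defaultStrict: Boolean = true   // plays the role of defaultStrictTypeRefTest
        val exact   = sameShape(List(1, 2), List(1, 2))
        val relaxed = sameShape(List(1, 2), List(1, 3))(false)  // explicit override, as in the ExistentialType case
        (exact, relaxed)
      }
    }
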
diff --git a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
index 5ce0cfdb1c..c84e608bb7 100644
--- a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
@@ -57,6 +57,12 @@ trait DependencyAnalysis extends SubComponent with Files {
override def default(f : AbstractFile) = immutable.Set()
}
+ /** External references for inherited members used in the source file */
+ val inherited: mutable.Map[AbstractFile, immutable.Set[Inherited]] =
+ new mutable.HashMap[AbstractFile, immutable.Set[Inherited]] {
+ override def default(f : AbstractFile) = immutable.Set()
+ }
+
/** Write dependencies to the current file. */
def saveDependencies(fromFile: AbstractFile => String) =
if(dependenciesFile.isDefined)
@@ -106,7 +112,13 @@ trait DependencyAnalysis extends SubComponent with Files {
filtered
}
+ case class Inherited(qualifier: String, member: Name)
+
class AnalysisPhase(prev : Phase) extends StdPhase(prev){
+
+ override def cancelled(unit: CompilationUnit) =
+ super.cancelled(unit) && !unit.isJava
+
def apply(unit : global.CompilationUnit) {
val f = unit.source.file.file;
// When we're passed strings by the interpreter
@@ -122,7 +134,7 @@ trait DependencyAnalysis extends SubComponent with Files {
atPhase (currentRun.picklerPhase.next) {
!s.isImplClass && !s.isNestedClass
}
- if (isTopLevelModule && (s.linkedModuleOfClass != NoSymbol)) {
+ if (isTopLevelModule && (s.companionModule != NoSymbol)) {
dependencies.emits(source, nameToFile(unit.source.file, name))
}
dependencies.emits(source, nameToFile(unit.source.file, name + "$"))
@@ -139,6 +151,7 @@ trait DependencyAnalysis extends SubComponent with Files {
// find all external references in this compilation unit
val file = unit.source.file
references += file -> immutable.Set.empty[String]
+ inherited += file -> immutable.Set.empty[Inherited]
val buf = new mutable.ListBuffer[Symbol]
@@ -151,12 +164,16 @@ trait DependencyAnalysis extends SubComponent with Files {
&& ((tree.symbol.sourceFile eq null)
|| (tree.symbol.sourceFile.path != file.path))
&& (!tree.symbol.isClassConstructor)) {
- updateReferences(tree.symbol.fullNameString)
+ updateReferences(tree.symbol.fullName)
+ atPhase(currentRun.uncurryPhase.prev) {
+ checkType(tree.symbol.tpe)
+ }
}
tree match {
- case cdef: ClassDef if !cdef.symbol.hasFlag(Flags.PACKAGE) =>
- buf += cdef.symbol
+ case cdef: ClassDef if !cdef.symbol.hasFlag(Flags.PACKAGE) &&
+ !cdef.symbol.isAnonymousFunction =>
+ if (cdef.symbol != NoSymbol) buf += cdef.symbol
atPhase(currentRun.erasurePhase.prev) {
for (s <- cdef.symbol.info.decls)
s match {
@@ -172,7 +189,13 @@ trait DependencyAnalysis extends SubComponent with Files {
checkType(ddef.symbol.tpe)
}
super.traverse(tree)
-
+ case a @ Select(q, n) if ((a.symbol != NoSymbol) && (q.symbol != null)) => // #2556
+ if (!a.symbol.isConstructor &&
+ !a.symbol.owner.isPackageClass &&
+ !isSameType(q.tpe, a.symbol.owner.tpe))
+ inherited += file ->
+ (inherited(file) + Inherited(q.symbol.tpe.resultType.safeToString, n))
+ super.traverse(tree)
case _ =>
super.traverse(tree)
}
@@ -185,11 +208,19 @@ trait DependencyAnalysis extends SubComponent with Files {
for (s <- t.params) checkType(s.tpe)
case t: TypeRef =>
- updateReferences(t.typeSymbol.fullNameString)
+ if (t.sym.isAliasType) {
+ updateReferences(t.typeSymbolDirect.fullName)
+ checkType(t.typeSymbolDirect.info)
+ }
+ updateReferences(t.typeSymbol.fullName)
for (tp <- t.args) checkType(tp)
+ case t: PolyType =>
+ checkType(t.resultType)
+ updateReferences(t.typeSymbol.fullName)
+
case t =>
- updateReferences(t.typeSymbol.fullNameString)
+ updateReferences(t.typeSymbol.fullName)
}
def updateReferences(s: String): Unit =
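
The new `inherited` map mirrors the existing `references` map but records, per source file, selections made through a qualifier whose type differs from the member's owner (the #2556 case handled in the `Select(q, n)` branch). A minimal sketch of that bookkeeping, using only the standard collections and illustrative `String` keys in place of `AbstractFile`:

    import scala.collection.{ mutable, immutable }

    object InheritedRefsSketch {
      final case class Inherited(qualifier: String, member: String)

      // Per-file set of inherited-member references, defaulting to the empty set,
      // exactly like the maps used by DependencyAnalysis.
      val inherited: mutable.Map[String, immutable.Set[Inherited]] =
        new mutable.HashMap[String, immutable.Set[Inherited]] {
          override def default(f: String) = immutable.Set()
        }

      def record(file: String, qualifier: String, member: String): Unit =
        inherited += file -> (inherited(file) + Inherited(qualifier, member))
    }
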
diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
index b70d8c10ec..ca72f6581b 100644
--- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/DocFactory.scala
@@ -53,10 +53,9 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
assert(settings.docformat.value == "html")
if (!reporter.hasErrors) {
val modelFactory = (new model.ModelFactory(compiler, settings))
- val htmlFactory = (new html.HtmlFactory(reporter, settings))
val docModel = modelFactory.makeModel
println("model contains " + modelFactory.templatesCount + " documentable templates")
- htmlFactory generate docModel
+ (new html.HtmlFactory(docModel)) generate docModel
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/DocProvider.scala b/src/compiler/scala/tools/nsc/doc/DocProvider.scala
deleted file mode 100644
index bcf227ebb9..0000000000
--- a/src/compiler/scala/tools/nsc/doc/DocProvider.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-package scala.tools.nsc.doc
-
-class DocProvider \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala
index 75aff8e4bd..3d02689605 100644
--- a/src/compiler/scala/tools/nsc/doc/Settings.scala
+++ b/src/compiler/scala/tools/nsc/doc/Settings.scala
@@ -26,7 +26,10 @@ class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
* documented. 'Note:'' This setting is currently not used. */
val docversion = StringSetting ("-doc-version", "doc-version", "An optional version number, to be appended to the title", "")
+ /** A setting that defines a URL to be concatenated with source locations to produce a link to the source files.
+ * If needed, the sourcepath option can be used to exclude an undesired initial part of the link to the sources */
+ val docsourceurl = StringSetting ("-doc-source-url", "url", "The URL prefix where documentation will link to sources", "")
+
// working around issue described in r18708.
suppressVTWarn.value = true
-
}
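
Since `-doc-source-url` is an ordinary `StringSetting`, a generator can read it as `settings.docsourceurl.value` and prepend it to a path relative to the source root. A small sketch of that composition; the helper name `sourceLink` and the URLs are made up for illustration:

    object SourceLinkSketch {
      // Joins the -doc-source-url prefix with a source-relative path;
      // an empty prefix (the setting's default) yields no link at all.
      def sourceLink(docSourceUrl: String, relativePath: String): Option[String] =
        if (docSourceUrl.isEmpty) None
        else Some(docSourceUrl.stripSuffix("/") + "/" + relativePath.stripPrefix("/"))

      def demo(): Option[String] =
        sourceLink("http://example.org/browse", "scala/Predef.scala")
        // Some("http://example.org/browse/scala/Predef.scala")
    }
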
diff --git a/src/compiler/scala/tools/nsc/doc/SourcelessComments.scala b/src/compiler/scala/tools/nsc/doc/SourcelessComments.scala
index 0791c6fa51..46e45a861b 100644
--- a/src/compiler/scala/tools/nsc/doc/SourcelessComments.scala
+++ b/src/compiler/scala/tools/nsc/doc/SourcelessComments.scala
@@ -23,234 +23,155 @@ abstract class SourcelessComments {
val comment = mutable.HashMap.empty[Symbol, DocComment]
comment(NothingClass) = new DocComment("""
- /** <p>
- * Class <code>Nothing</code> is - together with class <a href="Null.html">
- * <code>Null</code></a> - at the bottom of the
- * <a href="http://scala-lang.org" target="_top">Scala</a> type
- * hierarchy.
- * </p>
- * <p>
- * Type <code>Nothing</code> is a subtype of every other type
- * (including <a href="Null.html"><code>Null</code></a>); there
- * exist <em>no instances</em> of this type. Even though type
- * <code>Nothing</code> is empty, it is nevertheless useful as a
- * type parameter. For instance, the <a href="http://scala-lang.org"
- * target="_top">Scala</a> library defines a value
- * <a href="Nil$object.html"><code>Nil</code></a> of type
- * <code><a href="List.html">List</a>[Nothing]</code>. Because lists
- * are covariant in <a href="http://scala-lang.org" target="_top">Scala</a>,
- * this makes <a href="Nil$object.html"><code>Nil</code></a> an
- * instance of <code><a href="List.html">List</a>[T]</code>, for
- * any element type <code>T</code>.
- * </p> */
+ /** Class `Nothing` is - together with class [[scala.Null]] - at the bottom of Scala's type hierarchy.
+ *
+ * Type `Nothing` is a subtype of every other type (including [[scala.Null]]); there exist ''no instances'' of
+ * this type. Even though type `Nothing` is empty, it is nevertheless useful as a type parameter. For instance,
+ * the Scala library defines a value [[scala.collection.immutable.Nil]] of type `List[Nothing]`. Because lists
+ * are covariant in Scala, this makes [[scala.collection.immutable.Nil]] an instance of `List[T]`, for any
+ * element type `T`. */
""")
comment(NullClass) = new DocComment("""
- /** <p>
- * Class <code>Null</code> is - together with class <a href="Nothing.html">
- * <code>Nothing</code> - at the bottom of the
- * <a href="http://scala-lang.org" target="_top">Scala</a> type
- * hierarchy.
- * </p>
- * <p>
- * Type <code>Null</code> is a subtype of all reference types; its
- * only instance is the <code>null</code> reference.
- * Since <code>Null</code> is not a subtype of value types,
- * <code>null</code> is not a member of any such type. For instance,
- * it is not possible to assign <code>null</code> to a variable of
- * type <a href="Int.html"><code>Int</code></a>.
- * </p> */
+ /** Class `Null` is - together with class [[scala.Nothing]] - at the bottom of the Scala type hierarchy.
+ *
+ * Type `Null` is a subtype of all reference types; its only instance is the `null` reference. Since `Null` is
+ * not a subtype of value types, `null` is not a member of any such type. For instance, it is not possible to
+ * assign `null` to a variable of type [[scala.Int]]. */
""")
/*******************************************************************/
/* Documentation for Any */
comment(AnyClass) = new DocComment("""
- /** <p>
- * Class <code>Any</code> is the root of the <a
- * href="http://scala-lang.org/"
- * target="_top">Scala</a> class hierarchy. Every class in a
- * <a href="http://scala-lang.org/" target="_top">Scala</a> execution
- * environment inherits directly or indirectly from this class.
- * Class <code>Any</code> has two direct subclasses:
- * <a href="AnyRef.html"><code>AnyRef</code></a> and
- * <a href="AnyVal.html"><code>AnyVal</code></a>.
- * </p> */
+ /** Class `Any` is the root of the Scala class hierarchy. Every class in a Scala execution environment inherits
+ * directly or indirectly from this class. Class `Any` has two direct subclasses: [[scala.AnyRef]] and
+ * [[scala.AnyVal]]. */
""")
comment(Any_equals) = new DocComment("""
- /** This method is used to compare the receiver object (<code>this</code>)
- * with the argument object (<code>arg0</code>) for equivalence.
+ /** This method is used to compare the receiver object (`this`) with the argument object (`arg0`) for equivalence.
*
- * <p>
- * The default implementations of this method is an <a
- * href="http://en.wikipedia.org/wiki/Equivalence_relation">equivalence
- * relation</a>:
- * <ul>
- * <li>It is reflexive: for any instance <code>x</code> of type <code>Any</code>,
- * <code>x.equals(x)</code> should return <code>true</code>.</li>
- * <li>It is symmetric: for any instances <code>x</code> and <code>y</code> of type
- * <code>Any</code>, <code>x.equals(y)</code> should return <code>true</code> if and only
- * if <code>y.equals(x)</code> returns <code>true</code>.</li>
- * <li>It is transitive: for any instances
- * <code>x</code>, <code>y</code>, and <code>z</code> of type <code>AnyRef</code>
- * if <code>x.equals(y)</code> returns <code>true</code> and
- * <code>y.equals(z)</code> returns
- * <code>true</code>, then <code>x.equals(z)</code> should return <code>true</code>.</li>
- *</ul>
- *</p>
+ * The default implementation of this method is an [http://en.wikipedia.org/wiki/Equivalence_relation equivalence
+ * relation]:
+ * * It is reflexive: for any instance `x` of type `Any`, `x.equals(x)` should return `true`.
+ * * It is symmetric: for any instances `x` and `y` of type `Any`, `x.equals(y)` should return `true` if and
+ * only if `y.equals(x)` returns `true`.
+ * * It is transitive: for any instances `x`, `y`, and `z` of type `AnyRef` if `x.equals(y)` returns `true` and
+ * `y.equals(z)` returns `true`, then `x.equals(z)` should return `true`.
*
- * <p>
- * If you override this method, you should verify that
- * your implementation remains an equivalence relation.
- * Additionally, when overriding this method it is often necessary to
- * override <code>hashCode</code> to ensure that objects that are
- * "equal" (<code>o1.equals(o2)</code> returns <code>true</code>)
- * hash to the same <a href="Int.html"><code>Int</code></a>
- * (<code>o1.hashCode.equals(o2.hashCode)</code>).
+ * If you override this method, you should verify that your implementation remains an equivalence relation.
+ * Additionally, when overriding this method it is often necessary to override `hashCode` to ensure that objects
+ * that are "equal" (`o1.equals(o2)` returns `true`) hash to the same [[scala.Int]]
+ * (`o1.hashCode.equals(o2.hashCode)`).
*
* @param arg0 the object to compare against this object for equality.
- * @return <code>true</code> if the receiver object is equivalent to the argument; <code>false</code> otherwise.
- * </p> */
+ * @return `true` if the receiver object is equivalent to the argument; `false` otherwise. */
""")
comment(Any_==) = new DocComment("""
/** `o == arg0` is the same as `o.equals(arg0)`.
- * <p>
+ *
* @param arg0 the object to compare against this object for equality.
- * @return `true` if the receiver object is equivalent to the argument; `false` otherwise.
- * </p> */
+ * @return `true` if the receiver object is equivalent to the argument; `false` otherwise. */
""")
comment(Any_!=) = new DocComment("""
/** `o != arg0` is the same as `!(o == (arg0))`.
- * <p>
+ *
* @param arg0 the object to compare against this object for dis-equality.
- * @return `false` if the receiver object is equivalent to the argument; `true` otherwise.
- * </p> */
+ * @return `false` if the receiver object is equivalent to the argument; `true` otherwise. */
""")
comment(Any_toString) = new DocComment("""
/** Returns a string representation of the object.
- * <p>
+ *
* The default representation is platform dependent.
*
- * @return a string representation of the object.
- * </p>*/
+ * @return a string representation of the object. */
""")
comment(Any_asInstanceOf) = new DocComment("""
- /**This method is used to cast the receiver object to be of type <code>T0</code>.
+ /** This method is used to cast the receiver object to be of type `T0`.
*
- * <p>Note that the success of a cast at runtime is modulo Scala's
- * erasure semantics. Therefore the expression
- * <code>1.asInstanceOf[String]</code> will throw a
- * <code>ClassCastException</code> at runtime, while the expression
- * <code>List(1).asInstanceOf[List[String]]</code> will not. In the
- * latter example, because the type argument is erased as part of
- * compilation it is not possible to check whether the contents of
- * the list are of the requested typed.
+ * Note that the success of a cast at runtime is modulo Scala's erasure semantics. Therefore the expression
+ * `1.asInstanceOf[String]` will throw a `ClassCastException` at runtime, while the expression
+ * `List(1).asInstanceOf[List[String]]` will not. In the latter example, because the type argument is erased as
+ * part of compilation, it is not possible to check whether the contents of the list are of the requested type.
*
- * @throws ClassCastException if the receiver object is not an
- * instance of erasure of type <code>T0</code>.
- * @return the receiver object.
- * </p> */
+ * @throws ClassCastException if the receiver object is not an instance of erasure of type `T0`.
+ * @return the receiver object. */
""")
comment(Any_isInstanceOf) = new DocComment("""
- /** This method is used to test whether the dynamic type of the receiver object is <code>T0</code>.
+ /** This method is used to test whether the dynamic type of the receiver object is `T0`.
*
- * <p>Note that the test result of the test is modulo Scala's erasure
- * semantics. Therefore the expression
- * <code>1.isInstanceOf[String]</code> will return
- * <code>false</code>, while the expression
- * <code>List(1).isInstanceOf[List[String]]</code> will return
- * <code>true</code>. In the latter example, because the type
- * argument is erased as part of compilation it is not possible to
- * check whether the contents of the list are of the requested typed.
+ * Note that the result of the test is modulo Scala's erasure semantics. Therefore the expression
+ * `1.isInstanceOf[String]` will return `false`, while the expression `List(1).isInstanceOf[List[String]]` will
+ * return `true`. In the latter example, because the type argument is erased as part of compilation it is not
+ * possible to check whether the contents of the list are of the requested type.
*
- * @return <code>true</code> if the receiver object is an
- * instance of erasure of type <code>T0</code>; <code>false</code> otherwise. */
+ * @return `true` if the receiver object is an instance of erasure of type `T0`; `false` otherwise. */
""")
comment(Any_hashCode) = new DocComment("""
/** Returns a hash code value for the object.
*
- * <p>
* The default hashing algorithm is platform dependent.
*
- * Note that it is allowed for two objects to have identical hash
- * codes (<code>o1.hashCode.equals(o2.hashCode)</code>) yet not be
- * equal (<code>o1.equals(o2)</code> returns <code>false</code>). A
- * degenerate implementation could always return <code>0</code>.
- * However, it is required that if two objects are equal
- * (<code>o1.equals(o2)</code> returns <code>true</code>) that they
- * have identical hash codes
- * (<code>o1.hashCode.equals(o2.hashCode)</code>). Therefore, when
- * overriding this method, be sure to verify that the behavior is
- * consistent with the <code>equals</code> method.
- * </p>
+ * Note that it is allowed for two objects to have identical hash codes (`o1.hashCode.equals(o2.hashCode)`) yet
+ * not be equal (`o1.equals(o2)` returns `false`). A degenerate implementation could always return `0`.
+ * However, it is required that if two objects are equal (`o1.equals(o2)` returns `true`) then they have
+ * identical hash codes (`o1.hashCode.equals(o2.hashCode)`). Therefore, when overriding this method, be sure
+ * to verify that the behavior is consistent with the `equals` method.
*
- * <p>
- * @return the hash code value for the object.
- * </p> */
+ * @return the hash code value for the object. */
""")
/*******************************************************************/
/* Documentation for AnyRef */
comment(AnyRefClass) = new DocComment("""
- /** <p>
- * Class <code>AnyRef</code> is the root class of all
- * <em>reference types</em>.
- * </p> */
+ /** Class `AnyRef` is the root class of all ''reference types''. */
""")
comment(Object_==) = new DocComment("""
- /** <code>o == arg0</code> is the same as <code>if (o eq null) arg0 eq null else o.equals(arg0)</code>.
- * <p>
+ /** `o == arg0` is the same as `if (o eq null) arg0 eq null else o.equals(arg0)`.
+ *
* @param arg0 the object to compare against this object for equality.
- * @return <code>true</code> if the receiver object is equivalent to the argument; <code>false</code> otherwise.
- * </p> */
+ * @return `true` if the receiver object is equivalent to the argument; `false` otherwise. */
""")
comment(Object_ne) = new DocComment("""
- /** <code>o.ne(arg0)</code> is the same as <code>!(o.eq(arg0))</code>.
- * <p>
+ /** `o.ne(arg0)` is the same as `!(o.eq(arg0))`.
+ *
* @param arg0 the object to compare against this object for reference dis-equality.
- * @return <code>false</code> if the argument is not a reference to the receiver object; <code>true</code> otherwise.
- * </p> */
+ * @return `false` if the argument is not a reference to the receiver object; `true` otherwise. */
""")
comment(Object_finalize) = new DocComment("""
- /** This method is called by the garbage collector on the receiver object when garbage
- * collection determines that there are no more references to the object.
- * <p>
- * The details of when and if the <code>finalize</code> method are
- * invoked, as well as the interaction between <code>finalize</code>
- * and non-local returns and exceptions, are all platform dependent.
- * </p> */
+ /** This method is called by the garbage collector on the receiver object when garbage collection determines that
+ * there are no more references to the object.
+ *
+ * The details of when and if the `finalize` method is invoked, as well as the interaction between `finalize`
+ * and non-local returns and exceptions, are all platform dependent. */
""")
comment(Object_clone) = new DocComment("""
/** This method creates and returns a copy of the receiver object.
*
- * <p>
- * The default implementation of the <code>clone</code> method is platform dependent.
+ * The default implementation of the `clone` method is platform dependent.
*
- * @return a copy of the receiver object.
- * </p> */
+ * @return a copy of the receiver object. */
""")
comment(Object_getClass) = new DocComment("""
/** Returns a representation that corresponds to the dynamic class of the receiver object.
*
- * <p>
* The nature of the representation is platform dependent.
*
- * @return a representation that corresponds to the dynamic class of the receiver object.
- * </p> */
+ * @return a representation that corresponds to the dynamic class of the receiver object. */
""")
comment(Object_notify) = new DocComment("""
@@ -262,113 +183,63 @@ abstract class SourcelessComments {
""")
comment(Object_eq) = new DocComment("""
- /** This method is used to test whether the argument (<code>arg0</code>) is a reference to the
- * receiver object (<code>this</code>).
- *
- * <p>
- * The <code>eq</code> method implements an
- * <a href="http://en.wikipedia.org/wiki/Equivalence_relation">equivalence relation</a> on non-null instances of
- * <code>AnyRef</code>:
- * <ul>
- * <li>It is reflexive: for any non-null instance <code>x</code> of type <code>AnyRef</code>,
- * <code>x.eq(x)</code> returns <code>true</code>.</li>
- * <li>It is symmetric: for any non-null instances <code>x</code> and <code>y</code> of type
- * <code>AnyRef</code>, <code>x.eq(y)</code> returns <code>true</code> if and only
- * if <code>y.eq(x)</code> returns <code>true</code>.</li>
- * <li>It is transitive: for any non-null instances
- * <code>x</code>, <code>y</code>, and <code>z</code> of type <code>AnyRef</code>
- * if <code>x.eq(y)</code> returns <code>true</code> and
- * <code>y.eq(z)</code> returns
- * <code>true</code>, then <code>x.eq(z)</code> returns <code>true</code>.</li>
- * </ul>
- * Additionally, the <code>eq</code> method has three other properties.
- * <ul>
- * <li>It is consistent: for any non-null instances <code>x</code> and <code>y</code> of type <code>AnyRef</code>,
- * multiple invocations of <code>x.eq(y)</code> consistently returns <code>true</code>
- * or consistently returns <code>false</code>.</li>
- * <li>For any non-null instance <code>x</code> of type <code>AnyRef</code>,
- * <code>x.eq(null)</code> and <code>null.eq(x)</code> returns <code>false</code>.</li>
- * <li><code>null.eq(null)</code> returns <code>true</code>.</li>
- *</ul>
- *</p>
- *
- * <p> When overriding the <code>equals</code> or
- * <code>hashCode</code> methods, it is important to ensure that
- * their behavior is consistent with reference equality. Therefore,
- * if two objects are references to each other (<code>o1 eq
- * o2</code>), they should be equal to each other (<code>o1 ==
- * o2</code>) and they should hash to the same value
- * (<code>o1.hashCode == o2.hashCode</code>).</p>
+ /** This method is used to test whether the argument (`arg0`) is a reference to the
+ * receiver object (`this`).
+ *
+ * The `eq` method implements an [http://en.wikipedia.org/wiki/Equivalence_relation equivalence relation] on
+ * non-null instances of `AnyRef`:
+ * * It is reflexive: for any non-null instance `x` of type `AnyRef`, `x.eq(x)` returns `true`.
+ * * It is symmetric: for any non-null instances `x` and `y` of type `AnyRef`, `x.eq(y)` returns `true` if and
+ * only if `y.eq(x)` returns `true`.
+ * * It is transitive: for any non-null instances `x`, `y`, and `z` of type `AnyRef` if `x.eq(y)` returns `true`
+ * and `y.eq(z)` returns `true`, then `x.eq(z)` returns `true`.
+ *
+ * Additionally, the `eq` method has three other properties.
+ * * It is consistent: for any non-null instances `x` and `y` of type `AnyRef`, multiple invocations of
+ * `x.eq(y)` consistently return `true` or consistently return `false`.
+ * * For any non-null instance `x` of type `AnyRef`, `x.eq(null)` and `null.eq(x)` return `false`.
+ * * `null.eq(null)` returns `true`.
+ *
+ * When overriding the `equals` or `hashCode` methods, it is important to ensure that their behavior is
+ * consistent with reference equality. Therefore, if two objects are references to each other (`o1 eq o2`), they
+ * should be equal to each other (`o1 == o2`) and they should hash to the same value (`o1.hashCode == o2.hashCode`).
*
* @param arg0 the object to compare against this object for reference equality.
- * @return <code>true</code> if the argument is a reference to the receiver object; <code>false</code> otherwise.
- * </p> */
+ * @return `true` if the argument is a reference to the receiver object; `false` otherwise. */
""")
/*******************************************************************/
comment(AnyValClass) = new DocComment("""
- /** <p>
- * Class <code>AnyVal</code> is the root class of all
- * <em>value types</em>.
- * </p>
- * <p>
- * <code>AnyVal</code> has a fixed number subclasses, which
- * describe values which are not implemented as objects in the
+ /** Class `AnyVal` is the root class of all ''value types''.
+ *
+ * `AnyVal` has a fixed number of subclasses, which describe values which are not implemented as objects in the
* underlying host system.
- * </p>
- * <p>
- * Classes <a href="Double.html"><code>Double</code></a>,
- * <a href="Float.html"><code>Float</code></a>,
- * <a href="Long.html"><code>Long</code></a>,
- * <a href="Int.html"><code>Int</code></a>,
- * <a href="Char.html"><code>Char</code></a>,
- * <a href="Short.html"><code>Short</code></a>, and
- * <a href="Byte.html"><code>Byte</code></a> are together called
- * <em>numeric value types</em>.
- * Classes <a href="Byte.html"><code>Byte</code></a>,
- * <a href="Short.html"><code>Short</code></a>, or
- * <a href="Char.html"><code>Char</code></a>
- * are called <em>subrange types</em>. Subrange types, as well as
- * <a href="Int.html"><code>Int</code></a> and
- * <a href="Long.html"><code>Long</code></a> are called
- * <em>integer types</em>, whereas
- * <a href="Float.html"><code>Float</code></a> and
- * <a href="Double.html"><code>Double</code></a> are called
- * <em>floating point types</em>.
- * </p> */
+ *
+ * Classes [[scala.Double]], [[scala.Float]], [[scala.Long]], [[scala.Int]], [[scala.Char]], [[scala.Short]],
+ * and [[scala.Byte]] are together called ''numeric value types''. Classes [[scala.Byte]], [[scala.Short]], and
+ * [[scala.Char]] are called ''subrange types''. Subrange types, as well as [[scala.Int]] and [[scala.Long]], are
+ * called ''integer types'', whereas [[scala.Float]] and [[scala.Double]] are called ''floating point types''. */
""")
comment(BooleanClass) = new DocComment("""
- /** <p>
- * Class <code>Boolean</code> has only two values: <code>true</code>
- * and <code>false</code>.
- * </p> */
+ /** Class `Boolean` has only two values: `true` and `false`. */
""")
comment(UnitClass) = new DocComment("""
- /** <p>
- * Class <code>Unit</code> has only one value: <code>()</code>.
- * </p> */
+ /** Class `Unit` has only one value: `()`. */
""")
List(ByteClass, CharClass, DoubleClass, LongClass, FloatClass, IntClass, ShortClass) foreach { sym =>
val maxValue = "MAX_" + sym.name.toString().toUpperCase()
val minValue = "MIN_" + sym.name.toString().toUpperCase()
comment(sym) = new DocComment("""
- /** <p>
- * Class <code>""" + sym.name + """</code> belongs to the value
- * classes whose instances are not represented as objects by the
- * underlying host system. There is an implicit conversion from
- * instances of <code>""" + sym.name + """</code> to instances of
- * <a href="runtime/Rich""" + sym.name + """.html"><code>runtime.Rich""" + sym.name + """</code></a> which
- * provides useful non-primitive operations. All value classes inherit
- * from class <a href="AnyVal.html"><code>AnyVal</code></a>.
- * </p>
- * <p>
- * Values <code>""" + maxValue + """</code> and <code>""" + minValue + """</code>
- * are in defined in object <a href="Math$object.html">scala.Math</a>.
- * </p> */
+ /** Class `""" + sym.name + """` belongs to the value classes whose instances are not represented as objects by
+ * the underlying host system. There is an implicit conversion from instances of `""" + sym.name + """` to
+ * instances of [[scala.runtime.Rich""" + sym.name + """]] which provides useful non-primitive operations.
+ * All value classes inherit from class [[scala.AnyVal]].
+ *
+ * Values `""" + maxValue + """` and `""" + minValue + """` are defined in object [[scala.Math]]. */
""")
}
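
The comments above are rewritten from inline HTML to Scaladoc wiki syntax: backticks for inline code, `''...''` for italics, `[[...]]` for entity links, and blank comment lines instead of `<p>` tags. For reference, a doc comment written in the same style on ordinary user code would look like this (a made-up example, not part of the patch):

    /** Sums a `List[Int]`.
     *
     *  Runs in ''linear'' time; see also [[scala.collection.immutable.List]].
     *
     *  @param xs the list to sum
     *  @return the sum of all elements of `xs`. */
    def sum(xs: List[Int]): Int = xs.foldLeft(0)(_ + _)
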
diff --git a/src/compiler/scala/tools/nsc/doc/Universe.scala b/src/compiler/scala/tools/nsc/doc/Universe.scala
new file mode 100644
index 0000000000..666a06dc4b
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/Universe.scala
@@ -0,0 +1,8 @@
+package scala.tools.nsc.doc
+
+/**
+ * Class to hold common dependencies across Scaladoc classes.
+ * @author Pedro Furlanetto
+ * @author Gilles Dubochet
+ */
+class Universe(val settings: Settings, val rootPackage: model.Package)
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
index bb6095a04e..f695d0067f 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -7,48 +7,43 @@ package scala.tools.nsc
package doc
package html
+import java.io.{ File => JFile }
+import io.{ Streamable, Directory }
import reporters.Reporter
import model._
-
-import java.io.{FileOutputStream, File}
import scala.collection._
/** A class that can generate Scaladoc sites to some fixed root folder.
* @author David Bernard
* @author Gilles Dubochet */
-class HtmlFactory(val reporter: Reporter, val settings: Settings) {
+class HtmlFactory(val universe: Universe) {
/** The character encoding to be used for generated Scaladoc sites. This value is currently always UTF-8. */
def encoding: String = "UTF-8"
- /** The character encoding to be used for generated Scaladoc sites. This value is defined by the generator's
- * settings. */
- def siteRoot: File = new File(settings.outdir.value)
+ def siteRoot: JFile = new JFile(universe.settings.outdir.value)
- /** Generates the Scaladoc site for a model into the site toot. A scaladoc site is a set of HTML and related files
+ /** Generates the Scaladoc site for a model into the site root. A scaladoc site is a set of HTML and related files
* that document a model extracted from a compiler run.
* @param model The model to generate in the form of a sequence of packages. */
- def generate(modelRoot: Package): Unit = {
-
+ def generate(universe: Universe): Unit = {
def copyResource(subPath: String) {
- val buf = new Array[Byte](1024)
- val in = getClass.getResourceAsStream("/scala/tools/nsc/doc/html/resource/" + subPath)
- assert(in != null)
- val dest = new File(siteRoot, subPath)
- dest.getParentFile.mkdirs()
- val out = new FileOutputStream(dest)
- try {
- var len = 0
- while ({len = in.read(buf); len != -1})
- out.write(buf, 0, len)
- }
- finally {
- in.close()
- out.close()
- }
+ val bytes = new Streamable.Bytes {
+ val inputStream = getClass.getResourceAsStream("/scala/tools/nsc/doc/html/resource/" + subPath)
+ assert(inputStream != null)
+ } . toByteArray()
+
+ val dest = Directory(siteRoot) / subPath
+ dest.parent.createDirectory()
+ val out = dest.toFile.bufferedOutput()
+
+ try out.write(bytes, 0, bytes.length)
+ finally out.close()
}
copyResource("lib/jquery.js")
+ copyResource("lib/tools.tooltip.js")
+ copyResource("lib/scheduler.js")
copyResource("lib/index.css")
copyResource("lib/index.js")
copyResource("lib/template.css")
@@ -61,8 +56,11 @@ class HtmlFactory(val reporter: Reporter, val settings: Settings) {
copyResource("lib/trait_big.png")
copyResource("lib/package.png")
copyResource("lib/package_big.png")
+ copyResource("lib/filter_box_left.png")
+ copyResource("lib/filter_box_right.png")
+ copyResource("lib/remove.png")
- new page.Index(modelRoot) writeFor this
+ new page.Index(universe) writeFor this
val written = mutable.HashSet.empty[DocTemplateEntity]
@@ -72,7 +70,7 @@ class HtmlFactory(val reporter: Reporter, val settings: Settings) {
tpl.templates filter { t => !(written contains t) } map (writeTemplate(_))
}
- writeTemplate(modelRoot)
+ writeTemplate(universe.rootPackage)
}
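
The rewritten `copyResource` above delegates the byte shuffling to the compiler's `io` helpers: `Streamable.Bytes` slurps the classpath resource into an array, and `Directory(siteRoot) / subPath` builds and creates the destination path. For comparison, the same operation expressed with nothing but the JDK and the standard library (what the removed loop did by hand) looks roughly like this:

    import java.io.{ File, FileOutputStream, InputStream }

    object CopyResourceSketch {
      // Read a classpath resource fully, create the parent directories, write the bytes out.
      def copyResource(siteRoot: File, subPath: String): Unit = {
        val in: InputStream = getClass.getResourceAsStream("/scala/tools/nsc/doc/html/resource/" + subPath)
        assert(in != null, "resource not found: " + subPath)
        val bytes =
          try Iterator.continually(in.read()).takeWhile(_ != -1).map(_.toByte).toArray
          finally in.close()

        val dest = new File(siteRoot, subPath)
        dest.getParentFile.mkdirs()
        val out = new FileOutputStream(dest)
        try out.write(bytes, 0, bytes.length)
        finally out.close()
      }
    }
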
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
index 455f9697ce..74ec740204 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -13,7 +13,7 @@ import comment._
import xml.{Unparsed, XML, NodeSeq}
import xml.dtd.{DocType, PublicID}
import scala.collection._
-import scala.util.NameTransformer
+import scala.reflect.NameTransformer
import java.nio.channels.Channels
import java.io.{FileOutputStream, File}
@@ -40,9 +40,6 @@ abstract class HtmlPage { thisPage =>
* also defined by the generator.
* @param generator The generator that is writing this page. */
def writeFor(site: HtmlFactory): Unit = {
- val pageFile = new File(site.siteRoot, absoluteLinkTo(thisPage.path))
- val pageFolder = pageFile.getParentFile
- if (!pageFolder.exists) pageFolder.mkdirs()
val doctype =
DocType("html", PublicID("-//W3C//DTD XHTML 1.1//EN", "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"), Nil)
val html =
@@ -55,6 +52,9 @@ abstract class HtmlPage { thisPage =>
</head>
{ body }
</html>
+ val pageFile = new File(site.siteRoot, absoluteLinkTo(thisPage.path))
+ val pageFolder = pageFile.getParentFile
+ if (!pageFolder.exists) pageFolder.mkdirs()
val fos = new FileOutputStream(pageFile.getPath)
val w = Channels.newWriter(fos.getChannel, site.encoding)
try {
@@ -62,7 +62,10 @@ abstract class HtmlPage { thisPage =>
w.write( doctype.toString + "\n")
w.write(xml.Xhtml.toXhtml(html))
}
- finally { w.close() ; fos.close() }
+ finally {
+ w.close()
+ fos.close()
+ }
//XML.save(pageFile.getPath, html, site.encoding, xmlDecl = false, doctype = doctype)
}
@@ -132,24 +135,36 @@ abstract class HtmlPage { thisPage =>
case Paragraph(in) => <p>{ inlineToHtml(in) }</p>
case Code(data) => <pre>{ Unparsed(data) }</pre>
case UnorderedList(items) =>
- <ul>{items map { i => <li>{ blockToHtml(i) }</li>}}</ul>
- case OrderedList(items) =>
- <ol>{items map { i => <li>{ blockToHtml(i) }</li>}}</ol>
+ <ul>{ listItemsToHtml(items) }</ul>
+ case OrderedList(items, listStyle) =>
+ <ol class={ listStyle }>{ listItemsToHtml(items) }</ol>
case DefinitionList(items) =>
<dl>{items map { case (t, d) => <dt>{ inlineToHtml(t) }</dt><dd>{ blockToHtml(d) }</dd> } }</dl>
case HorizontalRule() =>
<hr/>
}
+ def listItemsToHtml(items: Seq[Block]) =
+ items.foldLeft(xml.NodeSeq.Empty){ (xmlList, item) =>
+ item match {
+ case OrderedList(_, _) | UnorderedList(_) => // html requires sub ULs to be put into the last LI
+ xmlList.init ++ <li>{ xmlList.last.child ++ blockToHtml(item) }</li>
+ case Paragraph(inline) =>
+ xmlList :+ <li>{ inlineToHtml(inline) }</li> // LIs are blocks, no need to use Ps
+ case block =>
+ xmlList :+ <li>{ blockToHtml(block) }</li>
+ }
+ }
+
def inlineToHtml(inl: Inline): NodeSeq = inl match {
- //case URLLink(url, text) => <a href={url}>{if(text.isEmpty)url else inlineSeqsToXml(text)}</a>
case Chain(items) => items flatMap (inlineToHtml(_))
case Italic(in) => <i>{ inlineToHtml(in) }</i>
case Bold(in) => <b>{ inlineToHtml(in) }</b>
case Underline(in) => <u>{ inlineToHtml(in) }</u>
case Superscript(in) => <sup>{ inlineToHtml(in) }</sup>
case Subscript(in) => <sub>{ inlineToHtml(in) }</sub>
- case Link(raw) => Unparsed(raw)//error("link not supported") // TODO
+ case Link(raw, title) => <a href={ raw }>{ inlineToHtml(title) }</a>
+ case EntityLink(entity) => templateToHtml(entity)
case Monospace(text) => <code>{ Unparsed(text) }</code>
case Text(text) => Unparsed(text)
}
@@ -171,7 +186,7 @@ abstract class HtmlPage { thisPage =>
val (tpl, width) = tpe.refEntity(inPos)
(tpl match {
case dtpl:DocTemplateEntity if hasLinks =>
- <a href={ relativeLinkTo(tpl) } class="extype" name={ dtpl.qualifiedName }>{
+ <a href={ relativeLinkTo(dtpl) } class="extype" name={ dtpl.qualifiedName }>{
string.slice(inPos, inPos + width)
}</a>
case tpl =>
@@ -187,7 +202,7 @@ abstract class HtmlPage { thisPage =>
/** Returns the HTML code that represents the template in `tpl` as a hyperlinked name. */
def templateToHtml(tpl: TemplateEntity) = tpl match {
case dTpl: DocTemplateEntity =>
- <a href={ relativeLinkTo(dTpl) }>{ dTpl.name }</a>
+ <a href={ relativeLinkTo(dTpl) } class="extype" name={ dTpl.qualifiedName }>{ dTpl.name }</a>
case ndTpl: NoDocTemplate =>
xml.Text(ndTpl.name)
}
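
The new `listItemsToHtml` handles nested lists: XHTML does not allow a `<ul>`/`<ol>` as a direct child of another list, so a nested list block is folded into the previous `<li>` rather than emitted as a sibling. A reduced sketch of the same fold over a made-up mini block model (`Para`/`Bullets` are illustrative, not the comment model's real types):

    import scala.xml.NodeSeq

    object NestedListSketch {
      sealed trait Block
      final case class Para(text: String)         extends Block
      final case class Bullets(items: Seq[Block]) extends Block

      // A nested list is merged into the last <li> produced so far,
      // mirroring the shape of listItemsToHtml.
      def itemsToHtml(items: Seq[Block]): NodeSeq =
        items.foldLeft(NodeSeq.Empty) { (acc, item) =>
          item match {
            case Bullets(inner) if acc.nonEmpty =>
              acc.init ++ <li>{ acc.last.child ++ <ul>{ itemsToHtml(inner) }</ul> }</li>
            case Bullets(inner) =>
              <li><ul>{ itemsToHtml(inner) }</ul></li>
            case Para(text) =>
              acc ++ <li>{ text }</li>
          }
        }
    }
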
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
index 59ac4eb105..7bbd8ef821 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
@@ -13,25 +13,41 @@ import model._
import scala.collection._
import scala.xml._
-class Index(modelRoot: Package) extends HtmlPage {
+class Index(universe: Universe) extends HtmlPage {
def path = List("index.html")
- def title = "Scaladoc: all classes and objects"
+ def title = {
+ val s = universe.settings
+ ( if (!s.doctitle.isDefault) s.doctitle.value else "" ) +
+ ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" )
+ }
def headers =
<xml:group>
- <style type="text/css">
- @import url({ relativeLinkTo(List("index.css", "lib")) }) screen;
- </style>
- <script type="text/javascript" src={ relativeLinkTo{List("index.js", "lib")} }></script>
+ <link href={ relativeLinkTo(List("index.css", "lib")) } media="screen" type="text/css" rel="stylesheet"/>
+ <script type="text/javascript" src={ relativeLinkTo{List("index.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("scheduler.js", "lib")} }></script>
</xml:group>
def body =
<body>
+ <div id="library">
+ <img class='class icon' src='lib/class.png'/>
+ <img class='trait icon' src='lib/trait.png'/>
+ <img class='object icon' src='lib/object.png'/>
+ <img class='package icon' src='lib/package.png'/>
+ </div>
<div id="browser">
- <input id="quickflt" type="text" accesskey="/"/>
- <div id="tpl">{
+ <div id="filter"></div>
+ <div class="pack" id="tpl">{
+ def isExcluded(dtpl: DocTemplateEntity) = {
+ val qname = dtpl.qualifiedName
+ (qname.startsWith("scala.Tuple") || qname.startsWith("scala.Product") || qname.startsWith("scala.Function")) &&
+ !(qname=="scala.Function1" || qname=="scala.Function2" || qname=="scala.Function" ||
+ qname=="scala.Product1" || qname=="scala.Product2" || qname=="scala.Product" ||
+ qname=="scala.Tuple1" || qname=="scala.Tuple2")
+ }
def packageElem(pack: model.Package): NodeSeq = {
<xml:group>
{ if (!pack.isRootPackage)
@@ -40,8 +56,8 @@ class Index(modelRoot: Package) extends HtmlPage {
}
<ol class="templates">{
val tpls: Map[String, Seq[DocTemplateEntity]] =
- (pack.templates filter (!_.isPackage)) groupBy (_.name)
- for (tn <- tpls.keySet.toSeq sortWith (_.toLowerCase < _.toLowerCase)) yield {
+ (pack.templates filter (t => !t.isPackage && !isExcluded(t) )) groupBy (_.name)
+ for (tn <- tpls.keySet.toSeq sortBy (_.toLowerCase)) yield {
val entries = tpls(tn) sortWith { (less, more) => less.isTrait || more.isObject }
def doEntry(ety: DocTemplateEntity, firstEty: Boolean): NodeSeq = {
val etyTpe =
@@ -57,16 +73,16 @@ class Index(modelRoot: Package) extends HtmlPage {
}
}</ol>
<ol class="packages"> {
- for (sp <- pack.packages sortWith (_.name.toLowerCase < _.name.toLowerCase)) yield
- <li>{ packageElem(sp) }</li>
+ for (sp <- pack.packages sortBy (_.name.toLowerCase)) yield
+ <li class="pack" title={ sp.qualifiedName }>{ packageElem(sp) }</li>
}</ol>
</xml:group>
}
- packageElem(modelRoot)
+ packageElem(universe.rootPackage)
}</div>
</div>
<div id="content">
- <iframe src={ relativeLinkTo{List("package.html")} }/>
+ <iframe name="template" src={ relativeLinkTo{List("package.html")} }/>
</div>
</body>
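
Two of the changes above swap `sortWith (_.name.toLowerCase < _.name.toLowerCase)` for `sortBy (_.name.toLowerCase)`; the resulting case-insensitive ordering is the same, `sortBy` simply states the sort key instead of the comparison. A tiny illustration:

    object SortByVsSortWith {
      val names = List("Seq", "array", "Map", "list")

      val bySortWith = names.sortWith(_.toLowerCase < _.toLowerCase)
      val bySortBy   = names.sortBy(_.toLowerCase)

      // Both yield List("array", "list", "Map", "Seq")
      def demo(): Boolean = bySortWith == bySortBy
    }
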
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala b/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
index f0206db4f4..22568e0a88 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
@@ -24,7 +24,7 @@ class Source(sourceFile: File) extends HtmlPage {
val body =
<body>
- <h1>Page source is not implmented yet</h1>
+ <h1>Page source is not implemented yet</h1>
</body>
/*
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
index 119823ff13..77b8d96e1c 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
@@ -22,22 +22,21 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
val headers =
<xml:group>
- <style type="text/css">
- @import url({ relativeLinkTo(List("template.css", "lib")) }) screen;
- </style>
- <script type="text/javascript" src={ relativeLinkTo{List("template.js", "lib")} }></script>
+ <link href={ relativeLinkTo(List("template.css", "lib")) } media="screen" type="text/css" rel="stylesheet"/>
+ <script type="text/javascript" src={ relativeLinkTo{List("template.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("tools.tooltip.js", "lib")} }></script>
</xml:group>
val valueMembers =
- (tpl.methods ::: tpl.values ::: (tpl.templates filter { tpl => tpl.isObject || tpl.isPackage })) sortWith (_.name < _.name)
+ (tpl.methods ::: tpl.values ::: (tpl.templates filter { tpl => tpl.isObject || tpl.isPackage })) sortBy (_.name)
val typeMembers =
- (tpl.abstractTypes ::: tpl.aliasTypes ::: (tpl.templates filter { tpl => tpl.isTrait || tpl.isClass })) sortWith (_.name < _.name)
+ (tpl.abstractTypes ::: tpl.aliasTypes ::: (tpl.templates filter { tpl => tpl.isTrait || tpl.isClass })) sortBy (_.name)
val constructors = (tpl match {
case cls: Class => cls.constructors
case _ => Nil
- }) sortWith (_.name < _.name)
+ }) sortBy (_.name)
val body =
<body class={ if (tpl.isTrait || tpl.isClass) "type" else "value" }>
@@ -58,14 +57,19 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
<div id="template">
- { if (tpl.linearization.isEmpty) NodeSeq.Empty else
- <div id="mbrsel">
+ <div id="mbrsel">
+ { if (tpl.linearization.isEmpty) NodeSeq.Empty else
<div id="ancestors">
- <h3>Inherits</h3>
- <ol>{ tpl.linearization map { wte => <li class="in" name={ wte.qualifiedName }>{ wte.name }</li> } }</ol>
+ <h3>Inherited</h3>
+ <ol><li class="hideall">Hide All</li><li class="showall">Show all</li></ol>
+ <ol id="linearization">{ tpl.linearization map { wte => <li class="in" name={ wte.qualifiedName }>{ wte.name }</li> } }</ol>
</div>
- </div>
- }
+ }
+ <div id="visbl">
+ <h3>Visibility</h3>
+ <ol><li class="public in">Public</li><li class="all out">All</li></ol>
+ </div>
+ </div>
{ if (typeMembers.isEmpty) NodeSeq.Empty else
<div id="types" class="members">
@@ -90,11 +94,13 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
</div>
+ <div id="tooltip" ></div>
+
</body>
def memberToHtml(mbr: MemberEntity): NodeSeq = {
val attributes: List[comment.Body] = Nil
- <li name={ mbr.definitionName }>
+ <li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }>
{ signature(mbr, false) }
{ memberToCommentHtml(mbr, false) }
</li>
@@ -146,12 +152,14 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}
paramEntry ++ paramCommentToHtml(rest)
}
- if (mbr.comment.isEmpty) NodeSeq.Empty else {
+ if (mbr.comment.isEmpty) NodeSeq.Empty
+ else {
val cmtedPrs = prs filter {
case tp: TypeParam => mbrCmt.typeParams isDefinedAt tp.name
case vp: ValueParam => mbrCmt.valueParams isDefinedAt vp.name
}
- if (cmtedPrs.isEmpty) NodeSeq.Empty else
+ if (cmtedPrs.isEmpty && mbrCmt.result.isEmpty) NodeSeq.Empty
+ else
<dl class="paramcmts block">{
paramCommentToHtml(cmtedPrs) ++ (
mbrCmt.result match {
@@ -162,12 +170,21 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}</dl>
}
}
- { val fvs: List[comment.Paragraph] = mbr.visibility.toList ::: mbr.flags
+ { val fvs: List[comment.Paragraph] = visibility(mbr).toList ::: mbr.flags
if (fvs.isEmpty) NodeSeq.Empty else
<div class="block">
attributes: { fvs map { fv => { inlineToHtml(fv.text) ++ xml.Text(" ") } } }
</div>
}
+ { tpl.companion match {
+ case Some(companion) if isSelf =>
+ <div class="block">
+ Go to: <a href={relativeLinkTo(companion)}>companion</a>
+ </div>
+ case _ =>
+ NodeSeq.Empty
+ }
+ }
{ val inDefTpls = mbr.inDefinitionTemplates
if (inDefTpls.tail.isEmpty && (inDefTpls.head == mbr.inTemplate)) NodeSeq.Empty else {
<div class="block">
@@ -176,24 +193,59 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}
}
{ mbr match {
- case dtpl: DocTemplateEntity if isSelf =>
- val subClss = dtpl.subClasses
- if (subClss.isEmpty) NodeSeq.Empty else
- <div class="block">
- known subclasses: { templatesToHtml(dtpl.subClasses, xml.Text(", ")) }
- </div>
+ case dtpl: DocTemplateEntity if (isSelf && !dtpl.subClasses.isEmpty) =>
+ <div class="block">
+ known subclasses: { templatesToHtml(dtpl.subClasses, xml.Text(", ")) }
+ </div>
case _ => NodeSeq.Empty
}
}
- { tpl.companion match {
- case Some(companion) =>
+ { mbr match {
+ case dtpl: DocTemplateEntity if (isSelf && dtpl.sourceUrl.isDefined) =>
+ val sourceUrl = tpl.sourceUrl.get
<div class="block">
- Go to: <a href={relativeLinkTo(companion)}>companion</a>
+ source: { <a href={ sourceUrl.toString }>{ Text(new java.io.File(sourceUrl.getPath).getName) }</a> }
</div>
- case None =>
- NodeSeq.Empty
+ case _ => NodeSeq.Empty
}
}
+ { if(mbr.deprecation.isEmpty) NodeSeq.Empty else
+ <div class="block"><ol>deprecated:
+ { <li>{ bodyToHtml(mbr.deprecation.get) }</li> }
+ </ol></div>
+ }
+ { for(comment <- mbr.comment.toList) yield {
+ <xml:group>
+ { if(!comment.version.isEmpty)
+ <div class="block"><ol>version
+ { for(body <- comment.version.toList) yield <li>{bodyToHtml(body)}</li> }
+ </ol></div>
+ else NodeSeq.Empty
+ }
+ { if(!comment.since.isEmpty)
+ <div class="block"><ol>since
+ { for(body <- comment.since.toList) yield <li>{bodyToHtml(body)}</li> }
+ </ol></div>
+ else NodeSeq.Empty
+ }
+ { if(!comment.see.isEmpty)
+ <div class="block"><ol>see also:
+ { val seeXml: List[scala.xml.NodeSeq] = for (see <- comment.see) yield <li>{ bodyToHtml(see) }</li>
+ seeXml.reduceLeft(_ ++ Text(", ") ++ _)
+ }
+ </ol></div>
+ else NodeSeq.Empty
+ }
+ { if(!comment.authors.isEmpty)
+ <div class="block"><ol>authors:
+ { val authorsXml: List[scala.xml.NodeSeq] = for (author <- comment.authors) yield <li>{ bodyToHtml(author) }</li>
+ authorsXml.reduceLeft(_ ++ Text(", ") ++ _)
+ }
+ </ol></div>
+ else NodeSeq.Empty
+ }
+ </xml:group>
+ }}
</xml:group>
def kindToString(mbr: MemberEntity): String = mbr match {
@@ -212,13 +264,34 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
bound0(hi, " <: ") ++ bound0(lo, " >: ")
}
+ def visibility(mbr: MemberEntity): Option[comment.Paragraph] = {
+ import comment._
+ import comment.{ Text => CText }
+ mbr.visibility match {
+ case PrivateInInstance() =>
+ Some(Paragraph(CText("private[this]")))
+ case PrivateInTemplate(owner) if (owner == mbr.inTemplate) =>
+ Some(Paragraph(CText("private")))
+ case PrivateInTemplate(owner) =>
+ Some(Paragraph(Chain(List(CText("private["), EntityLink(owner), CText("]")))))
+ case ProtectedInInstance() =>
+ Some(Paragraph(CText("protected[this]")))
+ case ProtectedInTemplate(owner) if (owner == mbr.inTemplate) =>
+ Some(Paragraph(CText("protected")))
+ case ProtectedInTemplate(owner) =>
+ Some(Paragraph(Chain(List(CText("protected["), EntityLink(owner), CText("]")))))
+ case Public() =>
+ None
+ }
+ }
+
/** name, tparams, params, result */
def signature(mbr: MemberEntity, isSelf: Boolean): NodeSeq = {
def inside(hasLinks: Boolean): NodeSeq =
<xml:group>
<span class="kind">{ kindToString(mbr) }</span>
<span class="symbol">
- <span class="name">{ if (mbr.isConstructor) tpl.name else mbr.name }</span>{
+ <span class={"name" + (if (mbr.deprecation.isDefined) " deprecated" else "") }>{ if (mbr.isConstructor) tpl.name else mbr.name }</span>{
def tparamsToHtml(tpss: List[TypeParam]): NodeSeq =
if (tpss.isEmpty) NodeSeq.Empty else {
def tparam0(tp: TypeParam): NodeSeq =
@@ -237,13 +310,23 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}{
def paramsToHtml(vlsss: List[List[ValueParam]]): NodeSeq = {
def param0(vl: ValueParam): NodeSeq =
- <span name={ vl.name }>{ vl.name + ": " }{ typeToHtml(vl.resultType, hasLinks) }</span>
+ // notice the }{ in the next lines: they are necessary to avoid an undesired whitespace in the output
+ <span name={ vl.name }>{ Text(vl.name + ": ") }{ typeToHtml(vl.resultType, hasLinks) }{
+ if(!vl.defaultValue.isEmpty) {
+ Text(" = ") ++ <span class="default">{vl.defaultValue.get}</span>
+ }
+ else NodeSeq.Empty
+ }</span>
def params0(vlss: List[ValueParam]): NodeSeq = vlss match {
case Nil => NodeSeq.Empty
case vl :: Nil => param0(vl)
case vl :: vls => param0(vl) ++ Text(", ") ++ params0(vls)
}
- vlsss map { vlss => <span class="params">({ params0(vlss) })</span> }
+ def implicitCheck(vlss: List[ValueParam]): NodeSeq = vlss match {
+ case vl :: vls => if(vl.isImplicit) { <span class="implicit">implicit </span> } else Text("")
+ case _ => Text("")
+ }
+ vlsss map { vlss => <span class="params">({implicitCheck(vlss) ++ params0(vlss) })</span> }
}
mbr match {
case cls: Class if cls.isCaseClass => paramsToHtml(cls.primaryConstructor.get.valueParams)
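
The new `visibility` helper above turns the model's `Visibility` values into short paragraphs ('private', 'private[Owner]', 'protected[this]', and so on) that the existing flags block already renders, while `memberToHtml` tags each `<li>` with a `visbl` attribute (`prt`/`pub`) for the new Public/All filter. A reduced sketch of the same mapping over a hypothetical, simplified visibility ADT (not the real Scaladoc model types):

    object VisibilitySketch {
      sealed trait Visibility
      case object Public                                 extends Visibility
      final case class Protected(owner: Option[String])  extends Visibility
      final case class Private(owner: Option[String])    extends Visibility

      // Public renders as nothing; everything else becomes a modifier string,
      // qualified by its owner when the owner is not the enclosing template.
      def render(v: Visibility): Option[String] = v match {
        case Public                 => None
        case Protected(None)        => Some("protected")
        case Protected(Some(owner)) => Some("protected[" + owner + "]")
        case Private(None)          => Some("private")
        case Private(Some(owner))   => Some("private[" + owner + "]")
      }
    }
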
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
new file mode 100644
index 0000000000..f4cca45dc0
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
new file mode 100644
index 0000000000..9fb3991b14
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
new file mode 100644
index 0000000000..1fda869beb
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
new file mode 100644
index 0000000000..860833d2b5
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
index 0b444eec13..63a2ffaa71 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
@@ -20,6 +20,10 @@ h1 {
display: none;
}
+#library {
+ display: none;
+}
+
#browser {
top: 0px;
left: 0px;
@@ -29,29 +33,80 @@ h1 {
position: fixed;
}
-#quickflt {
- display: block;
+#filter {
position: fixed;
- height: 22px;
- right: 0px;
- left: 0px;
- top: 0px;
+ display: block;
+ padding: 5px;
+ width: 290px;
+ right: 0;
+ left: 0;
+ top: 0;
+ background-color: grey;
+}
+
+#textfilter {
+ position: relative;
+ display: block;
+}
+
+#textfilter:before {
+ display: block;
+ content: url("filter_box_left.png");
+}
+
+#textfilter:after {
+ display: block;
position: absolute;
+ top: 0;
+ right: 0;
+ content: url("filter_box_right.png");
+}
+
+#textfilter input {
display: block;
+ position: absolute;
+ top: 0;
+ left: 32px;
+ right: 16px;
+ height: 22px;
+ width: 232px;
+ padding: 5px;
font-weight: bold;
color: #993300;
- padding: 5px;
background-color: white;
}
+#focusfilter {
+ position: relative;
+ display: block;
+ padding: 5px;
+ background-color: pink;
+}
+
+#focusfilter .focuscoll {
+ font-weight: bold;
+}
+
+#focusfilter img {
+ bottom: -2px;
+ position: relative;
+}
+
+#kindfilter {
+ position: relative;
+ display: block;
+ padding: 5px;
+ background-color: #F0F8FF;
+}
+
#tpl {
display: block;
position: fixed;
overflow: auto;
- right: 0px;
- left: 0px;
- bottom: 0px;
- top: 30px;
+ right: 0;
+ left: 0;
+ bottom: 0;
+ top: 5px;
position: absolute;
display: block;
}
@@ -64,6 +119,14 @@ h1 {
padding: 1px 4px 1px 4px;
}
+#tpl .packfocus {
+ display: block;
+ float: right;
+ font-weight: normal;
+ color: white;
+ padding: 1px 4px 1px 4px;
+}
+
#tpl .packages > li > h3 {
display: block;
background-color: #142556;
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
index ad8a53529b..815a176390 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
@@ -1,81 +1,277 @@
-// © 2009 EPFL/LAMP
-// written by Gilles Dubochet with contributions by Johannes Rudolph and "spiros"
-
-$(document).ready(function(){
- cleanPackage($("#tpl"));
- $("#tpl ol > li span:contains('(class)')").replaceWith("<img class='icon' src='lib/class.png'/>");
- $("#tpl ol > li span:contains('(object)')").replaceWith("<img class='icon' src='lib/object.png'/>");
- $("#tpl ol > li span:contains('(trait)')").replaceWith("<img class='icon' src='lib/trait.png'/>");
- $("#tpl ol > li span:contains('(package)')").replaceWith("<img class='icon' src='lib/package.png'/>");
- $("#tpl a[href]").click(function(event){
- $("#content>iframe").attr("src", event.currentTarget.href);
- return false;
- });
- $("#quickflt").focus(function(event) {
- $("#quickflt").select();
- });
- function search() {
- var query = $("#quickflt").attr("value");
- // Regexp that matches CamelCase subbits: "BiSe" is
- // "[a-z]*Bi[a-z]*Se" and matches "BitSet", "ABitSet", ...
- var queryRegExp = new RegExp(query.replace(/([A-Z])/g,"[a-z]*$1"));
- $("#tpl ol.templates > li").each(function(){
- var item = $(this).attr("title");
- if (item == "" || queryRegExp.test(item)) {
- $(this).show();
- $(this).removeClass("hide");
- }
- else {
- $(this).addClass("hide");
- $(this).hide();
- };
- });
- cleanPackage($("#tpl"));
- pendingTimeout = undefined;
- };
- var pendingTimeout = undefined;
- $("#quickflt").bind("keyup", function(event) {
- if (event.keyCode == 27) { // escape
- $("#quickflt").attr("value", "");
- }
- if (pendingTimeout != undefined) {
- clearTimeout(pendingTimeout);
- }
- pendingTimeout = setTimeout(search, 200); //delay 0.2 sec
- });
- $("#tpl .packages > li").prepend("<a class='packhide'>hide</a>");
- $("#tpl .packages > li > a.packhide").click(function(event){
- var action = $(this).text();
- if (action == "hide") {
- $("~ ol", $(this)).hide();
- $(this).text("show");
- }
- else {
- $("~ ol", $(this)).show();
- $(this).text("hide");
- }
- return false;
- });
+// © 2009–2010 EPFL/LAMP
+// code by Gilles Dubochet with contributions by Johannes Rudolph and "spiros"
+
+var topLevelTemplates = undefined;
+var topLevelPackages = undefined;
+
+var scheduler = undefined;
+var domCache = undefined;
+
+var kindFilterState = undefined;
+var focusFilterState = undefined;
+
+$(document).ready(function() {
+
+	// workaround for IE's lack of smartness when sizing iframes
+ if($.browser.msie) {
+ function fixIFrame() {
+ $('iframe').height($(window).height() )
+ }
+ $('iframe').bind("load",fixIFrame)
+ $('iframe').bind("resize",fixIFrame)
+ }
+
+ scheduler = new Scheduler();
+ scheduler.addLabel("init", 1);
+ scheduler.addLabel("focus", 2);
+ scheduler.addLabel("kind", 3);
+ scheduler.addLabel("filter", 4);
+
+ scheduler.addForAll = function(labelName, elems, fn) {
+ var idx = 0;
+ var elem = undefined;
+ while (idx < elems.length) {
+ elem = elems[idx];
+ scheduler.add(labelName, function(elem0) { fn(elem0); }, undefined, [elem]);
+ idx = idx + 1;
+ }
+ }
+
+ domCache = new DomCache();
+ domCache.update();
+
+ prepareEntityList();
+
+ configureTextFilter();
+ configureKindFilter();
+ configureEntityList();
+
});
-/* Recusively go through the packages and show only those which contain visible items. */
-function cleanPackage(pack) {
- $("> ol.packages > li", pack).each(function(){ cleanPackage($(this)); });
- if ($("> ol > li:not(.hide)", pack).length > 0) {
- pack.show();
- pack.removeClass("hide");
- }
- else {
- pack.addClass("hide");
- pack.hide();
- };
- if ($("> ol.templates > li:not(.hide)", pack).length > 0) {
- $("> h3", pack).show();
- $("> .packhide", pack).show();
- }
- else {
- $("> h3", pack).hide();
- $("> .packhide", pack).hide();
- };
- return false;
+function configureEntityList() {
+ kindFilterSync();
+ configureHideFilter();
+ configureFocusFilter();
+ textFilter();
+}
+
+/* The DomCache class holds a series of pointers to interesting parts of the page's DOM tree. Generally, any DOM
+   lookup should be scoped to a relevant element taken from the cache rather than to the whole document. This is
+   crucial to keeping the page's performance acceptable. */
+function DomCache() {
+ var cache = this;
+ this.packs = undefined;
+ this.liPacks = undefined;
+ this.update = function() {
+ cache.packs = $(".pack");
+ cache.liPacks = cache.packs.filter("li");
+ }
+}
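Because the focus tool below swaps whole sub-trees of #tpl in and out, cached selections can go stale; the pattern used throughout the file is to call update() after such a swap and then scope further lookups to the cached elements. A small sketch under the same assumptions as above:

// Hedged sketch, not part of the patch: refresh the cache after a DOM swap,
// then query inside each cached .pack element instead of the whole document.
domCache.update();
domCache.packs.each(function() {
  $("> ol.templates > li", this).addClass("hide");
});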
+
+/* Updates the list of entities (i.e. the content of the #tpl element) from the raw form generated by Scaladoc to a
+   form suitable for display. In particular, it replaces the kind markers with class, trait, object and package icons,
+   and it configures links to open in the right frame. Furthermore, it saves the two top-level entity lists
+   (topLevelTemplates and topLevelPackages) so that the index can be reset to them when needed.
+   Be advised: this function should only be called once, on page load. */
+function prepareEntityList() {
+ var classIcon = $("#library > img.class");
+ var traitIcon = $("#library > img.trait");
+ var objectIcon = $("#library > img.object");
+ var packageIcon = $("#library > img.package");
+ scheduler.addForAll("init", domCache.packs, function(pack) {
+ var packTemplates = $("> ol.templates > li", pack);
+ $("> h3 > a.tplshow", pack).add("> a.tplshow", packTemplates).attr("target", "template");
+ $("span.class", packTemplates).each(function() { $(this).replaceWith(classIcon.clone()); });
+ $("span.trait", packTemplates).each(function() { $(this).replaceWith(traitIcon.clone()); });
+ $("span.object", packTemplates).each(function() { $(this).replaceWith(objectIcon.clone()); });
+ $("span.package", packTemplates).each(function() { $(this).replaceWith(packageIcon.clone()); });
+ });
+ scheduler.add("init", function() {
+ topLevelTemplates = $("#tpl > ol.templates").clone();
+ topLevelPackages = $("#tpl > ol.packages").clone();
+ });
+}
+
+/* Configures the text filter */
+function configureTextFilter() {
+ scheduler.add("init", function() {
+ $("#filter").append("<div id='textfilter'><input type='text' accesskey='/'/></div>");
+ var input = $("#textfilter > input");
+ resizeFilterBlock();
+ input.bind("keyup", function(event) {
+ if (event.keyCode == 27) { // escape
+ input.attr("value", "");
+ }
+ textFilter();
+ });
+ input.focus(function(event) { input.select(); });
+ });
+}
+
+// Filters the currently displayed templates and packages against the query typed into the text filter
+// (read from the #textfilter input). This function should eventually be made less blocking.
+function textFilter() {
+ scheduler.clear("filter");
+ scheduler.add("filter", function() {
+ var query = $("#textfilter input").attr("value")
+ var queryRegExp;
+ if (query.toLowerCase() != query) {
+ // Regexp that matches CamelCase subbits: "BiSe" is
+ // "[a-z]*Bi[a-z]*Se" and matches "BitSet", "ABitSet", ...
+ queryRegExp = new RegExp(query.replace(/([A-Z])/g,"[a-z]*$1"));
+ }
+ else { // if query is all lower case make a normal case insensitive search
+ queryRegExp = new RegExp(query, "i");
+ }
+ scheduler.addForAll("filter", domCache.packs, function(pack0) {
+ var pack = $(pack0);
+ $("> ol.templates > li", pack).each(function(){
+ var item = $(this).attr("title");
+ if (item == "" || queryRegExp.test(item)) {
+ $(this).show();
+ $(this).removeClass("hide");
+ }
+ else {
+ $(this).addClass("hide");
+ $(this).hide();
+ }
+ });
+ if ($("> ol > li:not(.hide)", pack).length > 0) {
+ pack.show();
+ pack.removeClass("hide");
+ }
+ else {
+ pack.addClass("hide");
+ pack.hide();
+ }
+ if ($("> ol.templates > li:not(.hide)", pack).length > 0) {
+ $("> h3", pack).show();
+ $("> .packhide", pack).show();
+ $("> .packfocus", pack).show();
+ }
+ else {
+ $("> h3", pack).hide();
+ $("> .packhide", pack).hide();
+ $("> .packfocus", pack).hide();
+ }
+ });
+ });
+}
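For reference, a worked example of the query handling above (a sketch, not part of the patch): a query containing an uppercase letter is treated as a CamelCase abbreviation, while an all-lowercase query falls back to a plain case-insensitive match.

var query = "BiSe";
var queryRegExp = (query.toLowerCase() != query)
  ? new RegExp(query.replace(/([A-Z])/g, "[a-z]*$1"))  // becomes /[a-z]*Bi[a-z]*Se/
  : new RegExp(query, "i");
queryRegExp.test("BitSet");   // true
queryRegExp.test("ABitSet");  // true
queryRegExp.test("TreeMap");  // false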
+
+/* Configures the hide tool by adding the hide link to all packages. */
+function configureHideFilter() {
+ scheduler.addForAll("init", domCache.liPacks, function(pack) {
+ $(pack).prepend("<a class='packhide'>hide</a>");
+ $("> a.packhide", pack).click(function(event) {
+ var packhide = $(this)
+ var action = packhide.text();
+ if (action == "hide") {
+ $("~ ol", packhide).hide();
+ packhide.text("show");
+ }
+ else {
+ $("~ ol", packhide).show();
+ packhide.text("hide");
+ }
+ return false;
+ });
+ });
+}
+
+/* Configures the focus tool by adding the focus bar in the filter box (initially hidden), and by adding the focus
+ link to all packages. */
+function configureFocusFilter() {
+ scheduler.add("init", function() {
+ focusFilterState = null;
+ if ($("#focusfilter").length == 0) {
+ $("#filter").append("<div id='focusfilter'>focused on <span class='focuscoll'></span> <a class='focusremove'><img class='icon' src='lib/remove.png'/></a></div>");
+ $("#focusfilter > .focusremove").click(function(event) {
+ scheduler.clear("filter");
+ scheduler.add("focus", function() {
+ $("#tpl > ol.templates").replaceWith(topLevelTemplates.clone());
+ $("#tpl > ol.packages").replaceWith(topLevelPackages.clone());
+ domCache.update();
+ $("#focusfilter").hide();
+ $("#kindfilter").show();
+ resizeFilterBlock();
+ focusFilterState = null;
+ configureEntityList();
+ });
+ });
+ $("#focusfilter").hide();
+ resizeFilterBlock();
+ }
+ });
+ scheduler.addForAll("init", domCache.liPacks, function(pack) {
+ $(pack).prepend("<a class='packfocus'>focus</a>");
+ $("> a.packfocus", pack).click(function(event) {
+ focusFilter($(this).parent());
+ return false;
+ });
+ });
+}
+
+/* Focuses the entity index on a specific package. To do so, it moves the sub-templates and sub-packages of the
+   focused package into the top-level templates and packages position of the index. The original top-level lists
+   were saved by prepareEntityList (topLevelTemplates and topLevelPackages) and are restored when the focus is removed.
+   @param package The <li> element that corresponds to the package in the entity index */
+function focusFilter(package) {
+ scheduler.add("focus", function() {
+ scheduler.clear("filter");
+ var currentFocus = package.attr("title");
+ $("#focusfilter > .focuscoll").empty();
+ $("#focusfilter > .focuscoll").append(currentFocus);
+ var packTemplates = $("> ol.templates", package);
+ var packPackages = $("> ol.packages", package);
+ $("#tpl > ol.templates").replaceWith(packTemplates);
+ $("#tpl > ol.packages").replaceWith(packPackages);
+ domCache.update();
+ $("#focusfilter").show();
+ $("#kindfilter").hide();
+ resizeFilterBlock();
+ focusFilterState = package;
+ kindFilterSync();
+ });
+}
+
+function configureKindFilter() {
+ scheduler.add("init", function() {
+ kindFilterState = "all";
+ $("#filter").append("<div id='kindfilter'><a>display packages only</a></div>");
+ $("#kindfilter > a").click(function(event) { kindFilter("packs"); });
+ resizeFilterBlock();
+ });
+}
+
+function kindFilter(kind) {
+ if (kind == "packs") {
+ kindFilterState = "packs";
+ kindFilterSync();
+ $("#kindfilter > a").replaceWith("<a>display all entities</a>");
+ $("#kindfilter > a").click(function(event) { kindFilter("all"); });
+ }
+ else {
+ kindFilterState = "all";
+ kindFilterSync();
+ $("#kindfilter > a").replaceWith("<a>display packages only</a>");
+ $("#kindfilter > a").click(function(event) { kindFilter("packs"); });
+ }
+}
+
+/* Applies the kind filter. */
+function kindFilterSync() {
+ scheduler.add("kind", function () {
+ if (kindFilterState == "all" || focusFilterState != null)
+ scheduler.addForAll("kind", domCache.packs, function(pack0) {
+ $("> ol.templates", pack0).show();
+ });
+ else
+ scheduler.addForAll("kind", domCache.packs, function(pack0) {
+ $("> ol.templates", pack0).hide();
+ });
+ textFilter();
+ });
+}
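In other words, the kind filter is suspended while a package is focused; the branch above boils down to the following condition (a hedged summary, not part of the patch):

// Templates stay visible unless the user chose "packages only" and no package is focused.
var templatesVisible = (kindFilterState == "all" || focusFilterState != null);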
+
+function resizeFilterBlock() {
+ $("#tpl").css("top", $("#filter").outerHeight(true));
}
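resizeFilterBlock ties the stylesheet changes above (the fixed #filter block, with #tpl now starting at top: 5px) to the script: whenever a bar inside #filter is shown or hidden, the entity list is pushed down by the filter's full height, margins included. A minimal sketch of that interaction, assuming the same markup and globals as above:

// Hedged sketch, not part of the patch: showing the focus bar grows #filter,
// so the entity list is re-offset to stay below it.
$("#focusfilter").show();
$("#tpl").css("top", $("#filter").outerHeight(true));  // the same call resizeFilterBlock makes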
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
index b1ae21d8b2..7c24308023 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
@@ -1,19 +1,154 @@
-/*
- * jQuery JavaScript Library v1.3.2
+/*!
+ * jQuery JavaScript Library v1.4.2
* http://jquery.com/
*
- * Copyright (c) 2009 John Resig
- * Dual licensed under the MIT and GPL licenses.
- * http://docs.jquery.com/License
+ * Copyright 2010, John Resig
+ * Dual licensed under the MIT or GPL Version 2 licenses.
+ * http://jquery.org/license
*
- * Date: 2009-02-19 17:34:21 -0500 (Thu, 19 Feb 2009)
- * Revision: 6246
- */
-(function(){var l=this,g,y=l.jQuery,p=l.$,o=l.jQuery=l.$=function(E,F){return new o.fn.init(E,F)},D=/^[^<]*(<(.|\s)+>)[^>]*$|^#([\w-]+)$/,f=/^.[^:#\[\.,]*$/;o.fn=o.prototype={init:function(E,H){E=E||document;if(E.nodeType){this[0]=E;this.length=1;this.context=E;return this}if(typeof E==="string"){var G=D.exec(E);if(G&&(G[1]||!H)){if(G[1]){E=o.clean([G[1]],H)}else{var I=document.getElementById(G[3]);if(I&&I.id!=G[3]){return o().find(E)}var F=o(I||[]);F.context=document;F.selector=E;return F}}else{return o(H).find(E)}}else{if(o.isFunction(E)){return o(document).ready(E)}}if(E.selector&&E.context){this.selector=E.selector;this.context=E.context}return this.setArray(o.isArray(E)?E:o.makeArray(E))},selector:"",jquery:"1.3.2",size:function(){return this.length},get:function(E){return E===g?Array.prototype.slice.call(this):this[E]},pushStack:function(F,H,E){var G=o(F);G.prevObject=this;G.context=this.context;if(H==="find"){G.selector=this.selector+(this.selector?" ":"")+E}else{if(H){G.selector=this.selector+"."+H+"("+E+")"}}return G},setArray:function(E){this.length=0;Array.prototype.push.apply(this,E);return this},each:function(F,E){return o.each(this,F,E)},index:function(E){return o.inArray(E&&E.jquery?E[0]:E,this)},attr:function(F,H,G){var E=F;if(typeof F==="string"){if(H===g){return this[0]&&o[G||"attr"](this[0],F)}else{E={};E[F]=H}}return this.each(function(I){for(F in E){o.attr(G?this.style:this,F,o.prop(this,E[F],G,I,F))}})},css:function(E,F){if((E=="width"||E=="height")&&parseFloat(F)<0){F=g}return this.attr(E,F,"curCSS")},text:function(F){if(typeof F!=="object"&&F!=null){return this.empty().append((this[0]&&this[0].ownerDocument||document).createTextNode(F))}var E="";o.each(F||this,function(){o.each(this.childNodes,function(){if(this.nodeType!=8){E+=this.nodeType!=1?this.nodeValue:o.fn.text([this])}})});return E},wrapAll:function(E){if(this[0]){var F=o(E,this[0].ownerDocument).clone();if(this[0].parentNode){F.insertBefore(this[0])}F.map(function(){var G=this;while(G.firstChild){G=G.firstChild}return G}).append(this)}return this},wrapInner:function(E){return this.each(function(){o(this).contents().wrapAll(E)})},wrap:function(E){return this.each(function(){o(this).wrapAll(E)})},append:function(){return this.domManip(arguments,true,function(E){if(this.nodeType==1){this.appendChild(E)}})},prepend:function(){return this.domManip(arguments,true,function(E){if(this.nodeType==1){this.insertBefore(E,this.firstChild)}})},before:function(){return this.domManip(arguments,false,function(E){this.parentNode.insertBefore(E,this)})},after:function(){return this.domManip(arguments,false,function(E){this.parentNode.insertBefore(E,this.nextSibling)})},end:function(){return this.prevObject||o([])},push:[].push,sort:[].sort,splice:[].splice,find:function(E){if(this.length===1){var F=this.pushStack([],"find",E);F.length=0;o.find(E,this[0],F);return F}else{return this.pushStack(o.unique(o.map(this,function(G){return o.find(E,G)})),"find",E)}},clone:function(G){var E=this.map(function(){if(!o.support.noCloneEvent&&!o.isXMLDoc(this)){var I=this.outerHTML;if(!I){var J=this.ownerDocument.createElement("div");J.appendChild(this.cloneNode(true));I=J.innerHTML}return o.clean([I.replace(/ jQuery\d+="(?:\d+|null)"/g,"").replace(/^\s*/,"")])[0]}else{return this.cloneNode(true)}});if(G===true){var H=this.find("*").andSelf(),F=0;E.find("*").andSelf().each(function(){if(this.nodeName!==H[F].nodeName){return}var I=o.data(H[F],"events");for(var K in I){for(var J in I[K]){o.event.add(this,K,I[K][J],I[K][J].data)}}F++})}return 
E},filter:function(E){return this.pushStack(o.isFunction(E)&&o.grep(this,function(G,F){return E.call(G,F)})||o.multiFilter(E,o.grep(this,function(F){return F.nodeType===1})),"filter",E)},closest:function(E){var G=o.expr.match.POS.test(E)?o(E):null,F=0;return this.map(function(){var H=this;while(H&&H.ownerDocument){if(G?G.index(H)>-1:o(H).is(E)){o.data(H,"closest",F);return H}H=H.parentNode;F++}})},not:function(E){if(typeof E==="string"){if(f.test(E)){return this.pushStack(o.multiFilter(E,this,true),"not",E)}else{E=o.multiFilter(E,this)}}var F=E.length&&E[E.length-1]!==g&&!E.nodeType;return this.filter(function(){return F?o.inArray(this,E)<0:this!=E})},add:function(E){return this.pushStack(o.unique(o.merge(this.get(),typeof E==="string"?o(E):o.makeArray(E))))},is:function(E){return !!E&&o.multiFilter(E,this).length>0},hasClass:function(E){return !!E&&this.is("."+E)},val:function(K){if(K===g){var E=this[0];if(E){if(o.nodeName(E,"option")){return(E.attributes.value||{}).specified?E.value:E.text}if(o.nodeName(E,"select")){var I=E.selectedIndex,L=[],M=E.options,H=E.type=="select-one";if(I<0){return null}for(var F=H?I:0,J=H?I+1:M.length;F<J;F++){var G=M[F];if(G.selected){K=o(G).val();if(H){return K}L.push(K)}}return L}return(E.value||"").replace(/\r/g,"")}return g}if(typeof K==="number"){K+=""}return this.each(function(){if(this.nodeType!=1){return}if(o.isArray(K)&&/radio|checkbox/.test(this.type)){this.checked=(o.inArray(this.value,K)>=0||o.inArray(this.name,K)>=0)}else{if(o.nodeName(this,"select")){var N=o.makeArray(K);o("option",this).each(function(){this.selected=(o.inArray(this.value,N)>=0||o.inArray(this.text,N)>=0)});if(!N.length){this.selectedIndex=-1}}else{this.value=K}}})},html:function(E){return E===g?(this[0]?this[0].innerHTML.replace(/ jQuery\d+="(?:\d+|null)"/g,""):null):this.empty().append(E)},replaceWith:function(E){return this.after(E).remove()},eq:function(E){return this.slice(E,+E+1)},slice:function(){return this.pushStack(Array.prototype.slice.apply(this,arguments),"slice",Array.prototype.slice.call(arguments).join(","))},map:function(E){return this.pushStack(o.map(this,function(G,F){return E.call(G,F,G)}))},andSelf:function(){return this.add(this.prevObject)},domManip:function(J,M,L){if(this[0]){var I=(this[0].ownerDocument||this[0]).createDocumentFragment(),F=o.clean(J,(this[0].ownerDocument||this[0]),I),H=I.firstChild;if(H){for(var G=0,E=this.length;G<E;G++){L.call(K(this[G],H),this.length>1||G>0?I.cloneNode(true):I)}}if(F){o.each(F,z)}}return this;function K(N,O){return M&&o.nodeName(N,"table")&&o.nodeName(O,"tr")?(N.getElementsByTagName("tbody")[0]||N.appendChild(N.ownerDocument.createElement("tbody"))):N}}};o.fn.init.prototype=o.fn;function z(E,F){if(F.src){o.ajax({url:F.src,async:false,dataType:"script"})}else{o.globalEval(F.text||F.textContent||F.innerHTML||"")}if(F.parentNode){F.parentNode.removeChild(F)}}function e(){return +new Date}o.extend=o.fn.extend=function(){var J=arguments[0]||{},H=1,I=arguments.length,E=false,G;if(typeof J==="boolean"){E=J;J=arguments[1]||{};H=2}if(typeof J!=="object"&&!o.isFunction(J)){J={}}if(I==H){J=this;--H}for(;H<I;H++){if((G=arguments[H])!=null){for(var F in G){var K=J[F],L=G[F];if(J===L){continue}if(E&&L&&typeof L==="object"&&!L.nodeType){J[F]=o.extend(E,K||(L.length!=null?[]:{}),L)}else{if(L!==g){J[F]=L}}}}}return J};var b=/z-?index|font-?weight|opacity|zoom|line-?height/i,q=document.defaultView||{},s=Object.prototype.toString;o.extend({noConflict:function(E){l.$=p;if(E){l.jQuery=y}return o},isFunction:function(E){return 
s.call(E)==="[object Function]"},isArray:function(E){return s.call(E)==="[object Array]"},isXMLDoc:function(E){return E.nodeType===9&&E.documentElement.nodeName!=="HTML"||!!E.ownerDocument&&o.isXMLDoc(E.ownerDocument)},globalEval:function(G){if(G&&/\S/.test(G)){var F=document.getElementsByTagName("head")[0]||document.documentElement,E=document.createElement("script");E.type="text/javascript";if(o.support.scriptEval){E.appendChild(document.createTextNode(G))}else{E.text=G}F.insertBefore(E,F.firstChild);F.removeChild(E)}},nodeName:function(F,E){return F.nodeName&&F.nodeName.toUpperCase()==E.toUpperCase()},each:function(G,K,F){var E,H=0,I=G.length;if(F){if(I===g){for(E in G){if(K.apply(G[E],F)===false){break}}}else{for(;H<I;){if(K.apply(G[H++],F)===false){break}}}}else{if(I===g){for(E in G){if(K.call(G[E],E,G[E])===false){break}}}else{for(var J=G[0];H<I&&K.call(J,H,J)!==false;J=G[++H]){}}}return G},prop:function(H,I,G,F,E){if(o.isFunction(I)){I=I.call(H,F)}return typeof I==="number"&&G=="curCSS"&&!b.test(E)?I+"px":I},className:{add:function(E,F){o.each((F||"").split(/\s+/),function(G,H){if(E.nodeType==1&&!o.className.has(E.className,H)){E.className+=(E.className?" ":"")+H}})},remove:function(E,F){if(E.nodeType==1){E.className=F!==g?o.grep(E.className.split(/\s+/),function(G){return !o.className.has(F,G)}).join(" "):""}},has:function(F,E){return F&&o.inArray(E,(F.className||F).toString().split(/\s+/))>-1}},swap:function(H,G,I){var E={};for(var F in G){E[F]=H.style[F];H.style[F]=G[F]}I.call(H);for(var F in G){H.style[F]=E[F]}},css:function(H,F,J,E){if(F=="width"||F=="height"){var L,G={position:"absolute",visibility:"hidden",display:"block"},K=F=="width"?["Left","Right"]:["Top","Bottom"];function I(){L=F=="width"?H.offsetWidth:H.offsetHeight;if(E==="border"){return}o.each(K,function(){if(!E){L-=parseFloat(o.curCSS(H,"padding"+this,true))||0}if(E==="margin"){L+=parseFloat(o.curCSS(H,"margin"+this,true))||0}else{L-=parseFloat(o.curCSS(H,"border"+this+"Width",true))||0}})}if(H.offsetWidth!==0){I()}else{o.swap(H,G,I)}return Math.max(0,Math.round(L))}return o.curCSS(H,F,J)},curCSS:function(I,F,G){var L,E=I.style;if(F=="opacity"&&!o.support.opacity){L=o.attr(E,"opacity");return L==""?"1":L}if(F.match(/float/i)){F=w}if(!G&&E&&E[F]){L=E[F]}else{if(q.getComputedStyle){if(F.match(/float/i)){F="float"}F=F.replace(/([A-Z])/g,"-$1").toLowerCase();var M=q.getComputedStyle(I,null);if(M){L=M.getPropertyValue(F)}if(F=="opacity"&&L==""){L="1"}}else{if(I.currentStyle){var J=F.replace(/\-(\w)/g,function(N,O){return O.toUpperCase()});L=I.currentStyle[F]||I.currentStyle[J];if(!/^\d+(px)?$/i.test(L)&&/^\d/.test(L)){var H=E.left,K=I.runtimeStyle.left;I.runtimeStyle.left=I.currentStyle.left;E.left=L||0;L=E.pixelLeft+"px";E.left=H;I.runtimeStyle.left=K}}}}return L},clean:function(F,K,I){K=K||document;if(typeof K.createElement==="undefined"){K=K.ownerDocument||K[0]&&K[0].ownerDocument||document}if(!I&&F.length===1&&typeof F[0]==="string"){var H=/^<(\w+)\s*\/?>$/.exec(F[0]);if(H){return[K.createElement(H[1])]}}var G=[],E=[],L=K.createElement("div");o.each(F,function(P,S){if(typeof S==="number"){S+=""}if(!S){return}if(typeof S==="string"){S=S.replace(/(<(\w+)[^>]*?)\/>/g,function(U,V,T){return T.match(/^(abbr|br|col|img|input|link|meta|param|hr|area|embed)$/i)?U:V+"></"+T+">"});var O=S.replace(/^\s+/,"").substring(0,10).toLowerCase();var Q=!O.indexOf("<opt")&&[1,"<select 
multiple='multiple'>","</select>"]||!O.indexOf("<leg")&&[1,"<fieldset>","</fieldset>"]||O.match(/^<(thead|tbody|tfoot|colg|cap)/)&&[1,"<table>","</table>"]||!O.indexOf("<tr")&&[2,"<table><tbody>","</tbody></table>"]||(!O.indexOf("<td")||!O.indexOf("<th"))&&[3,"<table><tbody><tr>","</tr></tbody></table>"]||!O.indexOf("<col")&&[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"]||!o.support.htmlSerialize&&[1,"div<div>","</div>"]||[0,"",""];L.innerHTML=Q[1]+S+Q[2];while(Q[0]--){L=L.lastChild}if(!o.support.tbody){var R=/<tbody/i.test(S),N=!O.indexOf("<table")&&!R?L.firstChild&&L.firstChild.childNodes:Q[1]=="<table>"&&!R?L.childNodes:[];for(var M=N.length-1;M>=0;--M){if(o.nodeName(N[M],"tbody")&&!N[M].childNodes.length){N[M].parentNode.removeChild(N[M])}}}if(!o.support.leadingWhitespace&&/^\s/.test(S)){L.insertBefore(K.createTextNode(S.match(/^\s*/)[0]),L.firstChild)}S=o.makeArray(L.childNodes)}if(S.nodeType){G.push(S)}else{G=o.merge(G,S)}});if(I){for(var J=0;G[J];J++){if(o.nodeName(G[J],"script")&&(!G[J].type||G[J].type.toLowerCase()==="text/javascript")){E.push(G[J].parentNode?G[J].parentNode.removeChild(G[J]):G[J])}else{if(G[J].nodeType===1){G.splice.apply(G,[J+1,0].concat(o.makeArray(G[J].getElementsByTagName("script"))))}I.appendChild(G[J])}}return E}return G},attr:function(J,G,K){if(!J||J.nodeType==3||J.nodeType==8){return g}var H=!o.isXMLDoc(J),L=K!==g;G=H&&o.props[G]||G;if(J.tagName){var F=/href|src|style/.test(G);if(G=="selected"&&J.parentNode){J.parentNode.selectedIndex}if(G in J&&H&&!F){if(L){if(G=="type"&&o.nodeName(J,"input")&&J.parentNode){throw"type property can't be changed"}J[G]=K}if(o.nodeName(J,"form")&&J.getAttributeNode(G)){return J.getAttributeNode(G).nodeValue}if(G=="tabIndex"){var I=J.getAttributeNode("tabIndex");return I&&I.specified?I.value:J.nodeName.match(/(button|input|object|select|textarea)/i)?0:J.nodeName.match(/^(a|area)$/i)&&J.href?0:g}return J[G]}if(!o.support.style&&H&&G=="style"){return o.attr(J.style,"cssText",K)}if(L){J.setAttribute(G,""+K)}var E=!o.support.hrefNormalized&&H&&F?J.getAttribute(G,2):J.getAttribute(G);return E===null?g:E}if(!o.support.opacity&&G=="opacity"){if(L){J.zoom=1;J.filter=(J.filter||"").replace(/alpha\([^)]*\)/,"")+(parseInt(K)+""=="NaN"?"":"alpha(opacity="+K*100+")")}return J.filter&&J.filter.indexOf("opacity=")>=0?(parseFloat(J.filter.match(/opacity=([^)]*)/)[1])/100)+"":""}G=G.replace(/-([a-z])/ig,function(M,N){return N.toUpperCase()});if(L){J[G]=K}return J[G]},trim:function(E){return(E||"").replace(/^\s+|\s+$/g,"")},makeArray:function(G){var E=[];if(G!=null){var F=G.length;if(F==null||typeof G==="string"||o.isFunction(G)||G.setInterval){E[0]=G}else{while(F){E[--F]=G[F]}}}return E},inArray:function(G,H){for(var E=0,F=H.length;E<F;E++){if(H[E]===G){return E}}return -1},merge:function(H,E){var F=0,G,I=H.length;if(!o.support.getAll){while((G=E[F++])!=null){if(G.nodeType!=8){H[I++]=G}}}else{while((G=E[F++])!=null){H[I++]=G}}return H},unique:function(K){var F=[],E={};try{for(var G=0,H=K.length;G<H;G++){var J=o.data(K[G]);if(!E[J]){E[J]=true;F.push(K[G])}}}catch(I){F=K}return F},grep:function(F,J,E){var G=[];for(var H=0,I=F.length;H<I;H++){if(!E!=!J(F[H],H)){G.push(F[H])}}return G},map:function(E,J){var F=[];for(var G=0,H=E.length;G<H;G++){var I=J(E[G],G);if(I!=null){F[F.length]=I}}return F.concat.apply([],F)}});var C=navigator.userAgent.toLowerCase();o.browser={version:(C.match(/.+(?:rv|it|ra|ie)[\/: 
]([\d.]+)/)||[0,"0"])[1],safari:/webkit/.test(C),opera:/opera/.test(C),msie:/msie/.test(C)&&!/opera/.test(C),mozilla:/mozilla/.test(C)&&!/(compatible|webkit)/.test(C)};o.each({parent:function(E){return E.parentNode},parents:function(E){return o.dir(E,"parentNode")},next:function(E){return o.nth(E,2,"nextSibling")},prev:function(E){return o.nth(E,2,"previousSibling")},nextAll:function(E){return o.dir(E,"nextSibling")},prevAll:function(E){return o.dir(E,"previousSibling")},siblings:function(E){return o.sibling(E.parentNode.firstChild,E)},children:function(E){return o.sibling(E.firstChild)},contents:function(E){return o.nodeName(E,"iframe")?E.contentDocument||E.contentWindow.document:o.makeArray(E.childNodes)}},function(E,F){o.fn[E]=function(G){var H=o.map(this,F);if(G&&typeof G=="string"){H=o.multiFilter(G,H)}return this.pushStack(o.unique(H),E,G)}});o.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(E,F){o.fn[E]=function(G){var J=[],L=o(G);for(var K=0,H=L.length;K<H;K++){var I=(K>0?this.clone(true):this).get();o.fn[F].apply(o(L[K]),I);J=J.concat(I)}return this.pushStack(J,E,G)}});o.each({removeAttr:function(E){o.attr(this,E,"");if(this.nodeType==1){this.removeAttribute(E)}},addClass:function(E){o.className.add(this,E)},removeClass:function(E){o.className.remove(this,E)},toggleClass:function(F,E){if(typeof E!=="boolean"){E=!o.className.has(this,F)}o.className[E?"add":"remove"](this,F)},remove:function(E){if(!E||o.filter(E,[this]).length){o("*",this).add([this]).each(function(){o.event.remove(this);o.removeData(this)});if(this.parentNode){this.parentNode.removeChild(this)}}},empty:function(){o(this).children().remove();while(this.firstChild){this.removeChild(this.firstChild)}}},function(E,F){o.fn[E]=function(){return this.each(F,arguments)}});function j(E,F){return E[0]&&parseInt(o.curCSS(E[0],F,true),10)||0}var h="jQuery"+e(),v=0,A={};o.extend({cache:{},data:function(F,E,G){F=F==l?A:F;var H=F[h];if(!H){H=F[h]=++v}if(E&&!o.cache[H]){o.cache[H]={}}if(G!==g){o.cache[H][E]=G}return E?o.cache[H][E]:H},removeData:function(F,E){F=F==l?A:F;var H=F[h];if(E){if(o.cache[H]){delete o.cache[H][E];E="";for(E in o.cache[H]){break}if(!E){o.removeData(F)}}}else{try{delete F[h]}catch(G){if(F.removeAttribute){F.removeAttribute(h)}}delete o.cache[H]}},queue:function(F,E,H){if(F){E=(E||"fx")+"queue";var G=o.data(F,E);if(!G||o.isArray(H)){G=o.data(F,E,o.makeArray(H))}else{if(H){G.push(H)}}}return G},dequeue:function(H,G){var E=o.queue(H,G),F=E.shift();if(!G||G==="fx"){F=E[0]}if(F!==g){F.call(H)}}});o.fn.extend({data:function(E,G){var H=E.split(".");H[1]=H[1]?"."+H[1]:"";if(G===g){var F=this.triggerHandler("getData"+H[1]+"!",[H[0]]);if(F===g&&this.length){F=o.data(this[0],E)}return F===g&&H[1]?this.data(H[0]):F}else{return this.trigger("setData"+H[1]+"!",[H[0],G]).each(function(){o.data(this,E,G)})}},removeData:function(E){return this.each(function(){o.removeData(this,E)})},queue:function(E,F){if(typeof E!=="string"){F=E;E="fx"}if(F===g){return o.queue(this[0],E)}return this.each(function(){var G=o.queue(this,E,F);if(E=="fx"&&G.length==1){G[0].call(this)}})},dequeue:function(E){return this.each(function(){o.dequeue(this,E)})}});
-/*
- * Sizzle CSS Selector Engine - v0.9.3
- * Copyright 2009, The Dojo Foundation
- * Released under the MIT, BSD, and GPL Licenses.
- * More information: http://sizzlejs.com/
+ * Includes Sizzle.js
+ * http://sizzlejs.com/
+ * Copyright 2010, The Dojo Foundation
+ * Released under the MIT, BSD, and GPL Licenses.
+ *
+ * Date: Sat Feb 13 22:33:48 2010 -0500
*/
-(function(){var R=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?/g,L=0,H=Object.prototype.toString;var F=function(Y,U,ab,ac){ab=ab||[];U=U||document;if(U.nodeType!==1&&U.nodeType!==9){return[]}if(!Y||typeof Y!=="string"){return ab}var Z=[],W,af,ai,T,ad,V,X=true;R.lastIndex=0;while((W=R.exec(Y))!==null){Z.push(W[1]);if(W[2]){V=RegExp.rightContext;break}}if(Z.length>1&&M.exec(Y)){if(Z.length===2&&I.relative[Z[0]]){af=J(Z[0]+Z[1],U)}else{af=I.relative[Z[0]]?[U]:F(Z.shift(),U);while(Z.length){Y=Z.shift();if(I.relative[Y]){Y+=Z.shift()}af=J(Y,af)}}}else{var ae=ac?{expr:Z.pop(),set:E(ac)}:F.find(Z.pop(),Z.length===1&&U.parentNode?U.parentNode:U,Q(U));af=F.filter(ae.expr,ae.set);if(Z.length>0){ai=E(af)}else{X=false}while(Z.length){var ah=Z.pop(),ag=ah;if(!I.relative[ah]){ah=""}else{ag=Z.pop()}if(ag==null){ag=U}I.relative[ah](ai,ag,Q(U))}}if(!ai){ai=af}if(!ai){throw"Syntax error, unrecognized expression: "+(ah||Y)}if(H.call(ai)==="[object Array]"){if(!X){ab.push.apply(ab,ai)}else{if(U.nodeType===1){for(var aa=0;ai[aa]!=null;aa++){if(ai[aa]&&(ai[aa]===true||ai[aa].nodeType===1&&K(U,ai[aa]))){ab.push(af[aa])}}}else{for(var aa=0;ai[aa]!=null;aa++){if(ai[aa]&&ai[aa].nodeType===1){ab.push(af[aa])}}}}}else{E(ai,ab)}if(V){F(V,U,ab,ac);if(G){hasDuplicate=false;ab.sort(G);if(hasDuplicate){for(var aa=1;aa<ab.length;aa++){if(ab[aa]===ab[aa-1]){ab.splice(aa--,1)}}}}}return ab};F.matches=function(T,U){return F(T,null,null,U)};F.find=function(aa,T,ab){var Z,X;if(!aa){return[]}for(var W=0,V=I.order.length;W<V;W++){var Y=I.order[W],X;if((X=I.match[Y].exec(aa))){var U=RegExp.leftContext;if(U.substr(U.length-1)!=="\\"){X[1]=(X[1]||"").replace(/\\/g,"");Z=I.find[Y](X,T,ab);if(Z!=null){aa=aa.replace(I.match[Y],"");break}}}}if(!Z){Z=T.getElementsByTagName("*")}return{set:Z,expr:aa}};F.filter=function(ad,ac,ag,W){var V=ad,ai=[],aa=ac,Y,T,Z=ac&&ac[0]&&Q(ac[0]);while(ad&&ac.length){for(var ab in I.filter){if((Y=I.match[ab].exec(ad))!=null){var U=I.filter[ab],ah,af;T=false;if(aa==ai){ai=[]}if(I.preFilter[ab]){Y=I.preFilter[ab](Y,aa,ag,ai,W,Z);if(!Y){T=ah=true}else{if(Y===true){continue}}}if(Y){for(var X=0;(af=aa[X])!=null;X++){if(af){ah=U(af,Y,X,aa);var ae=W^!!ah;if(ag&&ah!=null){if(ae){T=true}else{aa[X]=false}}else{if(ae){ai.push(af);T=true}}}}}if(ah!==g){if(!ag){aa=ai}ad=ad.replace(I.match[ab],"");if(!T){return[]}break}}}if(ad==V){if(T==null){throw"Syntax error, unrecognized expression: "+ad}else{break}}V=ad}return aa};var I=F.selectors={order:["ID","NAME","TAG"],match:{ID:/#((?:[\w\u00c0-\uFFFF_-]|\\.)+)/,CLASS:/\.((?:[\w\u00c0-\uFFFF_-]|\\.)+)/,NAME:/\[name=['"]*((?:[\w\u00c0-\uFFFF_-]|\\.)+)['"]*\]/,ATTR:/\[\s*((?:[\w\u00c0-\uFFFF_-]|\\.)+)\s*(?:(\S?=)\s*(['"]*)(.*?)\3|)\s*\]/,TAG:/^((?:[\w\u00c0-\uFFFF\*_-]|\\.)+)/,CHILD:/:(only|nth|last|first)-child(?:\((even|odd|[\dn+-]*)\))?/,POS:/:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^-]|$)/,PSEUDO:/:((?:[\w\u00c0-\uFFFF_-]|\\.)+)(?:\((['"]*)((?:\([^\)]+\)|[^\2\(\)]*)+)\2\))?/},attrMap:{"class":"className","for":"htmlFor"},attrHandle:{href:function(T){return T.getAttribute("href")}},relative:{"+":function(aa,T,Z){var X=typeof T==="string",ab=X&&!/\W/.test(T),Y=X&&!ab;if(ab&&!Z){T=T.toUpperCase()}for(var W=0,V=aa.length,U;W<V;W++){if((U=aa[W])){while((U=U.previousSibling)&&U.nodeType!==1){}aa[W]=Y||U&&U.nodeName===T?U||false:U===T}}if(Y){F.filter(T,aa,true)}},">":function(Z,U,aa){var X=typeof U==="string";if(X&&!/\W/.test(U)){U=aa?U:U.toUpperCase();for(var V=0,T=Z.length;V<T;V++){var 
Y=Z[V];if(Y){var W=Y.parentNode;Z[V]=W.nodeName===U?W:false}}}else{for(var V=0,T=Z.length;V<T;V++){var Y=Z[V];if(Y){Z[V]=X?Y.parentNode:Y.parentNode===U}}if(X){F.filter(U,Z,true)}}},"":function(W,U,Y){var V=L++,T=S;if(!U.match(/\W/)){var X=U=Y?U:U.toUpperCase();T=P}T("parentNode",U,V,W,X,Y)},"~":function(W,U,Y){var V=L++,T=S;if(typeof U==="string"&&!U.match(/\W/)){var X=U=Y?U:U.toUpperCase();T=P}T("previousSibling",U,V,W,X,Y)}},find:{ID:function(U,V,W){if(typeof V.getElementById!=="undefined"&&!W){var T=V.getElementById(U[1]);return T?[T]:[]}},NAME:function(V,Y,Z){if(typeof Y.getElementsByName!=="undefined"){var U=[],X=Y.getElementsByName(V[1]);for(var W=0,T=X.length;W<T;W++){if(X[W].getAttribute("name")===V[1]){U.push(X[W])}}return U.length===0?null:U}},TAG:function(T,U){return U.getElementsByTagName(T[1])}},preFilter:{CLASS:function(W,U,V,T,Z,aa){W=" "+W[1].replace(/\\/g,"")+" ";if(aa){return W}for(var X=0,Y;(Y=U[X])!=null;X++){if(Y){if(Z^(Y.className&&(" "+Y.className+" ").indexOf(W)>=0)){if(!V){T.push(Y)}}else{if(V){U[X]=false}}}}return false},ID:function(T){return T[1].replace(/\\/g,"")},TAG:function(U,T){for(var V=0;T[V]===false;V++){}return T[V]&&Q(T[V])?U[1]:U[1].toUpperCase()},CHILD:function(T){if(T[1]=="nth"){var U=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(T[2]=="even"&&"2n"||T[2]=="odd"&&"2n+1"||!/\D/.test(T[2])&&"0n+"+T[2]||T[2]);T[2]=(U[1]+(U[2]||1))-0;T[3]=U[3]-0}T[0]=L++;return T},ATTR:function(X,U,V,T,Y,Z){var W=X[1].replace(/\\/g,"");if(!Z&&I.attrMap[W]){X[1]=I.attrMap[W]}if(X[2]==="~="){X[4]=" "+X[4]+" "}return X},PSEUDO:function(X,U,V,T,Y){if(X[1]==="not"){if(X[3].match(R).length>1||/^\w/.test(X[3])){X[3]=F(X[3],null,null,U)}else{var W=F.filter(X[3],U,V,true^Y);if(!V){T.push.apply(T,W)}return false}}else{if(I.match.POS.test(X[0])||I.match.CHILD.test(X[0])){return true}}return X},POS:function(T){T.unshift(true);return T}},filters:{enabled:function(T){return T.disabled===false&&T.type!=="hidden"},disabled:function(T){return T.disabled===true},checked:function(T){return T.checked===true},selected:function(T){T.parentNode.selectedIndex;return T.selected===true},parent:function(T){return !!T.firstChild},empty:function(T){return !T.firstChild},has:function(V,U,T){return !!F(T[3],V).length},header:function(T){return/h\d/i.test(T.nodeName)},text:function(T){return"text"===T.type},radio:function(T){return"radio"===T.type},checkbox:function(T){return"checkbox"===T.type},file:function(T){return"file"===T.type},password:function(T){return"password"===T.type},submit:function(T){return"submit"===T.type},image:function(T){return"image"===T.type},reset:function(T){return"reset"===T.type},button:function(T){return"button"===T.type||T.nodeName.toUpperCase()==="BUTTON"},input:function(T){return/input|select|textarea|button/i.test(T.nodeName)}},setFilters:{first:function(U,T){return T===0},last:function(V,U,T,W){return U===W.length-1},even:function(U,T){return T%2===0},odd:function(U,T){return T%2===1},lt:function(V,U,T){return U<T[3]-0},gt:function(V,U,T){return U>T[3]-0},nth:function(V,U,T){return T[3]-0==U},eq:function(V,U,T){return T[3]-0==U}},filter:{PSEUDO:function(Z,V,W,aa){var U=V[1],X=I.filters[U];if(X){return X(Z,W,V,aa)}else{if(U==="contains"){return(Z.textContent||Z.innerText||"").indexOf(V[3])>=0}else{if(U==="not"){var Y=V[3];for(var W=0,T=Y.length;W<T;W++){if(Y[W]===Z){return false}}return true}}}},CHILD:function(T,W){var Z=W[1],U=T;switch(Z){case"only":case"first":while(U=U.previousSibling){if(U.nodeType===1){return false}}if(Z=="first"){return 
true}U=T;case"last":while(U=U.nextSibling){if(U.nodeType===1){return false}}return true;case"nth":var V=W[2],ac=W[3];if(V==1&&ac==0){return true}var Y=W[0],ab=T.parentNode;if(ab&&(ab.sizcache!==Y||!T.nodeIndex)){var X=0;for(U=ab.firstChild;U;U=U.nextSibling){if(U.nodeType===1){U.nodeIndex=++X}}ab.sizcache=Y}var aa=T.nodeIndex-ac;if(V==0){return aa==0}else{return(aa%V==0&&aa/V>=0)}}},ID:function(U,T){return U.nodeType===1&&U.getAttribute("id")===T},TAG:function(U,T){return(T==="*"&&U.nodeType===1)||U.nodeName===T},CLASS:function(U,T){return(" "+(U.className||U.getAttribute("class"))+" ").indexOf(T)>-1},ATTR:function(Y,W){var V=W[1],T=I.attrHandle[V]?I.attrHandle[V](Y):Y[V]!=null?Y[V]:Y.getAttribute(V),Z=T+"",X=W[2],U=W[4];return T==null?X==="!=":X==="="?Z===U:X==="*="?Z.indexOf(U)>=0:X==="~="?(" "+Z+" ").indexOf(U)>=0:!U?Z&&T!==false:X==="!="?Z!=U:X==="^="?Z.indexOf(U)===0:X==="$="?Z.substr(Z.length-U.length)===U:X==="|="?Z===U||Z.substr(0,U.length+1)===U+"-":false},POS:function(X,U,V,Y){var T=U[2],W=I.setFilters[T];if(W){return W(X,V,U,Y)}}}};var M=I.match.POS;for(var O in I.match){I.match[O]=RegExp(I.match[O].source+/(?![^\[]*\])(?![^\(]*\))/.source)}var E=function(U,T){U=Array.prototype.slice.call(U);if(T){T.push.apply(T,U);return T}return U};try{Array.prototype.slice.call(document.documentElement.childNodes)}catch(N){E=function(X,W){var U=W||[];if(H.call(X)==="[object Array]"){Array.prototype.push.apply(U,X)}else{if(typeof X.length==="number"){for(var V=0,T=X.length;V<T;V++){U.push(X[V])}}else{for(var V=0;X[V];V++){U.push(X[V])}}}return U}}var G;if(document.documentElement.compareDocumentPosition){G=function(U,T){var V=U.compareDocumentPosition(T)&4?-1:U===T?0:1;if(V===0){hasDuplicate=true}return V}}else{if("sourceIndex" in document.documentElement){G=function(U,T){var V=U.sourceIndex-T.sourceIndex;if(V===0){hasDuplicate=true}return V}}else{if(document.createRange){G=function(W,U){var V=W.ownerDocument.createRange(),T=U.ownerDocument.createRange();V.selectNode(W);V.collapse(true);T.selectNode(U);T.collapse(true);var X=V.compareBoundaryPoints(Range.START_TO_END,T);if(X===0){hasDuplicate=true}return X}}}}(function(){var U=document.createElement("form"),V="script"+(new Date).getTime();U.innerHTML="<input name='"+V+"'/>";var T=document.documentElement;T.insertBefore(U,T.firstChild);if(!!document.getElementById(V)){I.find.ID=function(X,Y,Z){if(typeof Y.getElementById!=="undefined"&&!Z){var W=Y.getElementById(X[1]);return W?W.id===X[1]||typeof W.getAttributeNode!=="undefined"&&W.getAttributeNode("id").nodeValue===X[1]?[W]:g:[]}};I.filter.ID=function(Y,W){var X=typeof Y.getAttributeNode!=="undefined"&&Y.getAttributeNode("id");return Y.nodeType===1&&X&&X.nodeValue===W}}T.removeChild(U)})();(function(){var T=document.createElement("div");T.appendChild(document.createComment(""));if(T.getElementsByTagName("*").length>0){I.find.TAG=function(U,Y){var X=Y.getElementsByTagName(U[1]);if(U[1]==="*"){var W=[];for(var V=0;X[V];V++){if(X[V].nodeType===1){W.push(X[V])}}X=W}return X}}T.innerHTML="<a href='#'></a>";if(T.firstChild&&typeof T.firstChild.getAttribute!=="undefined"&&T.firstChild.getAttribute("href")!=="#"){I.attrHandle.href=function(U){return U.getAttribute("href",2)}}})();if(document.querySelectorAll){(function(){var T=F,U=document.createElement("div");U.innerHTML="<p class='TEST'></p>";if(U.querySelectorAll&&U.querySelectorAll(".TEST").length===0){return}F=function(Y,X,V,W){X=X||document;if(!W&&X.nodeType===9&&!Q(X)){try{return E(X.querySelectorAll(Y),V)}catch(Z){}}return 
T(Y,X,V,W)};F.find=T.find;F.filter=T.filter;F.selectors=T.selectors;F.matches=T.matches})()}if(document.getElementsByClassName&&document.documentElement.getElementsByClassName){(function(){var T=document.createElement("div");T.innerHTML="<div class='test e'></div><div class='test'></div>";if(T.getElementsByClassName("e").length===0){return}T.lastChild.className="e";if(T.getElementsByClassName("e").length===1){return}I.order.splice(1,0,"CLASS");I.find.CLASS=function(U,V,W){if(typeof V.getElementsByClassName!=="undefined"&&!W){return V.getElementsByClassName(U[1])}}})()}function P(U,Z,Y,ad,aa,ac){var ab=U=="previousSibling"&&!ac;for(var W=0,V=ad.length;W<V;W++){var T=ad[W];if(T){if(ab&&T.nodeType===1){T.sizcache=Y;T.sizset=W}T=T[U];var X=false;while(T){if(T.sizcache===Y){X=ad[T.sizset];break}if(T.nodeType===1&&!ac){T.sizcache=Y;T.sizset=W}if(T.nodeName===Z){X=T;break}T=T[U]}ad[W]=X}}}function S(U,Z,Y,ad,aa,ac){var ab=U=="previousSibling"&&!ac;for(var W=0,V=ad.length;W<V;W++){var T=ad[W];if(T){if(ab&&T.nodeType===1){T.sizcache=Y;T.sizset=W}T=T[U];var X=false;while(T){if(T.sizcache===Y){X=ad[T.sizset];break}if(T.nodeType===1){if(!ac){T.sizcache=Y;T.sizset=W}if(typeof Z!=="string"){if(T===Z){X=true;break}}else{if(F.filter(Z,[T]).length>0){X=T;break}}}T=T[U]}ad[W]=X}}}var K=document.compareDocumentPosition?function(U,T){return U.compareDocumentPosition(T)&16}:function(U,T){return U!==T&&(U.contains?U.contains(T):true)};var Q=function(T){return T.nodeType===9&&T.documentElement.nodeName!=="HTML"||!!T.ownerDocument&&Q(T.ownerDocument)};var J=function(T,aa){var W=[],X="",Y,V=aa.nodeType?[aa]:aa;while((Y=I.match.PSEUDO.exec(T))){X+=Y[0];T=T.replace(I.match.PSEUDO,"")}T=I.relative[T]?T+"*":T;for(var Z=0,U=V.length;Z<U;Z++){F(T,V[Z],W)}return F.filter(X,W)};o.find=F;o.filter=F.filter;o.expr=F.selectors;o.expr[":"]=o.expr.filters;F.selectors.filters.hidden=function(T){return T.offsetWidth===0||T.offsetHeight===0};F.selectors.filters.visible=function(T){return T.offsetWidth>0||T.offsetHeight>0};F.selectors.filters.animated=function(T){return o.grep(o.timers,function(U){return T===U.elem}).length};o.multiFilter=function(V,T,U){if(U){V=":not("+V+")"}return F.matches(V,T)};o.dir=function(V,U){var T=[],W=V[U];while(W&&W!=document){if(W.nodeType==1){T.push(W)}W=W[U]}return T};o.nth=function(X,T,V,W){T=T||1;var U=0;for(;X;X=X[V]){if(X.nodeType==1&&++U==T){break}}return X};o.sibling=function(V,U){var T=[];for(;V;V=V.nextSibling){if(V.nodeType==1&&V!=U){T.push(V)}}return T};return;l.Sizzle=F})();o.event={add:function(I,F,H,K){if(I.nodeType==3||I.nodeType==8){return}if(I.setInterval&&I!=l){I=l}if(!H.guid){H.guid=this.guid++}if(K!==g){var G=H;H=this.proxy(G);H.data=K}var E=o.data(I,"events")||o.data(I,"events",{}),J=o.data(I,"handle")||o.data(I,"handle",function(){return typeof o!=="undefined"&&!o.event.triggered?o.event.handle.apply(arguments.callee.elem,arguments):g});J.elem=I;o.each(F.split(/\s+/),function(M,N){var O=N.split(".");N=O.shift();H.type=O.slice().sort().join(".");var L=E[N];if(o.event.specialAll[N]){o.event.specialAll[N].setup.call(I,K,O)}if(!L){L=E[N]={};if(!o.event.special[N]||o.event.special[N].setup.call(I,K,O)===false){if(I.addEventListener){I.addEventListener(N,J,false)}else{if(I.attachEvent){I.attachEvent("on"+N,J)}}}}L[H.guid]=H;o.event.global[N]=true});I=null},guid:1,global:{},remove:function(K,H,J){if(K.nodeType==3||K.nodeType==8){return}var G=o.data(K,"events"),F,E;if(G){if(H===g||(typeof H==="string"&&H.charAt(0)==".")){for(var I in 
G){this.remove(K,I+(H||""))}}else{if(H.type){J=H.handler;H=H.type}o.each(H.split(/\s+/),function(M,O){var Q=O.split(".");O=Q.shift();var N=RegExp("(^|\\.)"+Q.slice().sort().join(".*\\.")+"(\\.|$)");if(G[O]){if(J){delete G[O][J.guid]}else{for(var P in G[O]){if(N.test(G[O][P].type)){delete G[O][P]}}}if(o.event.specialAll[O]){o.event.specialAll[O].teardown.call(K,Q)}for(F in G[O]){break}if(!F){if(!o.event.special[O]||o.event.special[O].teardown.call(K,Q)===false){if(K.removeEventListener){K.removeEventListener(O,o.data(K,"handle"),false)}else{if(K.detachEvent){K.detachEvent("on"+O,o.data(K,"handle"))}}}F=null;delete G[O]}}})}for(F in G){break}if(!F){var L=o.data(K,"handle");if(L){L.elem=null}o.removeData(K,"events");o.removeData(K,"handle")}}},trigger:function(I,K,H,E){var G=I.type||I;if(!E){I=typeof I==="object"?I[h]?I:o.extend(o.Event(G),I):o.Event(G);if(G.indexOf("!")>=0){I.type=G=G.slice(0,-1);I.exclusive=true}if(!H){I.stopPropagation();if(this.global[G]){o.each(o.cache,function(){if(this.events&&this.events[G]){o.event.trigger(I,K,this.handle.elem)}})}}if(!H||H.nodeType==3||H.nodeType==8){return g}I.result=g;I.target=H;K=o.makeArray(K);K.unshift(I)}I.currentTarget=H;var J=o.data(H,"handle");if(J){J.apply(H,K)}if((!H[G]||(o.nodeName(H,"a")&&G=="click"))&&H["on"+G]&&H["on"+G].apply(H,K)===false){I.result=false}if(!E&&H[G]&&!I.isDefaultPrevented()&&!(o.nodeName(H,"a")&&G=="click")){this.triggered=true;try{H[G]()}catch(L){}}this.triggered=false;if(!I.isPropagationStopped()){var F=H.parentNode||H.ownerDocument;if(F){o.event.trigger(I,K,F,true)}}},handle:function(K){var J,E;K=arguments[0]=o.event.fix(K||l.event);K.currentTarget=this;var L=K.type.split(".");K.type=L.shift();J=!L.length&&!K.exclusive;var I=RegExp("(^|\\.)"+L.slice().sort().join(".*\\.")+"(\\.|$)");E=(o.data(this,"events")||{})[K.type];for(var G in E){var H=E[G];if(J||I.test(H.type)){K.handler=H;K.data=H.data;var F=H.apply(this,arguments);if(F!==g){K.result=F;if(F===false){K.preventDefault();K.stopPropagation()}}if(K.isImmediatePropagationStopped()){break}}}},props:"altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode metaKey newValue originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target toElement view wheelDelta which".split(" "),fix:function(H){if(H[h]){return H}var F=H;H=o.Event(F);for(var G=this.props.length,J;G;){J=this.props[--G];H[J]=F[J]}if(!H.target){H.target=H.srcElement||document}if(H.target.nodeType==3){H.target=H.target.parentNode}if(!H.relatedTarget&&H.fromElement){H.relatedTarget=H.fromElement==H.target?H.toElement:H.fromElement}if(H.pageX==null&&H.clientX!=null){var I=document.documentElement,E=document.body;H.pageX=H.clientX+(I&&I.scrollLeft||E&&E.scrollLeft||0)-(I.clientLeft||0);H.pageY=H.clientY+(I&&I.scrollTop||E&&E.scrollTop||0)-(I.clientTop||0)}if(!H.which&&((H.charCode||H.charCode===0)?H.charCode:H.keyCode)){H.which=H.charCode||H.keyCode}if(!H.metaKey&&H.ctrlKey){H.metaKey=H.ctrlKey}if(!H.which&&H.button){H.which=(H.button&1?1:(H.button&2?3:(H.button&4?2:0)))}return H},proxy:function(F,E){E=E||function(){return F.apply(this,arguments)};E.guid=F.guid=F.guid||E.guid||this.guid++;return E},special:{ready:{setup:B,teardown:function(){}}},specialAll:{live:{setup:function(E,F){o.event.add(this,F[0],c)},teardown:function(G){if(G.length){var 
E=0,F=RegExp("(^|\\.)"+G[0]+"(\\.|$)");o.each((o.data(this,"events").live||{}),function(){if(F.test(this.type)){E++}});if(E<1){o.event.remove(this,G[0],c)}}}}}};o.Event=function(E){if(!this.preventDefault){return new o.Event(E)}if(E&&E.type){this.originalEvent=E;this.type=E.type}else{this.type=E}this.timeStamp=e();this[h]=true};function k(){return false}function u(){return true}o.Event.prototype={preventDefault:function(){this.isDefaultPrevented=u;var E=this.originalEvent;if(!E){return}if(E.preventDefault){E.preventDefault()}E.returnValue=false},stopPropagation:function(){this.isPropagationStopped=u;var E=this.originalEvent;if(!E){return}if(E.stopPropagation){E.stopPropagation()}E.cancelBubble=true},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=u;this.stopPropagation()},isDefaultPrevented:k,isPropagationStopped:k,isImmediatePropagationStopped:k};var a=function(F){var E=F.relatedTarget;while(E&&E!=this){try{E=E.parentNode}catch(G){E=this}}if(E!=this){F.type=F.data;o.event.handle.apply(this,arguments)}};o.each({mouseover:"mouseenter",mouseout:"mouseleave"},function(F,E){o.event.special[E]={setup:function(){o.event.add(this,F,a,E)},teardown:function(){o.event.remove(this,F,a)}}});o.fn.extend({bind:function(F,G,E){return F=="unload"?this.one(F,G,E):this.each(function(){o.event.add(this,F,E||G,E&&G)})},one:function(G,H,F){var E=o.event.proxy(F||H,function(I){o(this).unbind(I,E);return(F||H).apply(this,arguments)});return this.each(function(){o.event.add(this,G,E,F&&H)})},unbind:function(F,E){return this.each(function(){o.event.remove(this,F,E)})},trigger:function(E,F){return this.each(function(){o.event.trigger(E,F,this)})},triggerHandler:function(E,G){if(this[0]){var F=o.Event(E);F.preventDefault();F.stopPropagation();o.event.trigger(F,G,this[0]);return F.result}},toggle:function(G){var E=arguments,F=1;while(F<E.length){o.event.proxy(G,E[F++])}return this.click(o.event.proxy(G,function(H){this.lastToggle=(this.lastToggle||0)%F;H.preventDefault();return E[this.lastToggle++].apply(this,arguments)||false}))},hover:function(E,F){return this.mouseenter(E).mouseleave(F)},ready:function(E){B();if(o.isReady){E.call(document,o)}else{o.readyList.push(E)}return this},live:function(G,F){var E=o.event.proxy(F);E.guid+=this.selector+G;o(document).bind(i(G,this.selector),this.selector,E);return this},die:function(F,E){o(document).unbind(i(F,this.selector),E?{guid:E.guid+this.selector+F}:null);return this}});function c(H){var E=RegExp("(^|\\.)"+H.type+"(\\.|$)"),G=true,F=[];o.each(o.data(this,"events").live||[],function(I,J){if(E.test(J.type)){var K=o(H.target).closest(J.data)[0];if(K){F.push({elem:K,fn:J})}}});F.sort(function(J,I){return o.data(J.elem,"closest")-o.data(I.elem,"closest")});o.each(F,function(){if(this.fn.call(this.elem,H,this.fn.data)===false){return(G=false)}});return G}function i(F,E){return["live",F,E.replace(/\./g,"`").replace(/ /g,"|")].join(".")}o.extend({isReady:false,readyList:[],ready:function(){if(!o.isReady){o.isReady=true;if(o.readyList){o.each(o.readyList,function(){this.call(document,o)});o.readyList=null}o(document).triggerHandler("ready")}}});var x=false;function 
B(){if(x){return}x=true;if(document.addEventListener){document.addEventListener("DOMContentLoaded",function(){document.removeEventListener("DOMContentLoaded",arguments.callee,false);o.ready()},false)}else{if(document.attachEvent){document.attachEvent("onreadystatechange",function(){if(document.readyState==="complete"){document.detachEvent("onreadystatechange",arguments.callee);o.ready()}});if(document.documentElement.doScroll&&l==l.top){(function(){if(o.isReady){return}try{document.documentElement.doScroll("left")}catch(E){setTimeout(arguments.callee,0);return}o.ready()})()}}}o.event.add(l,"load",o.ready)}o.each(("blur,focus,load,resize,scroll,unload,click,dblclick,mousedown,mouseup,mousemove,mouseover,mouseout,mouseenter,mouseleave,change,select,submit,keydown,keypress,keyup,error").split(","),function(F,E){o.fn[E]=function(G){return G?this.bind(E,G):this.trigger(E)}});o(l).bind("unload",function(){for(var E in o.cache){if(E!=1&&o.cache[E].handle){o.event.remove(o.cache[E].handle.elem)}}});(function(){o.support={};var F=document.documentElement,G=document.createElement("script"),K=document.createElement("div"),J="script"+(new Date).getTime();K.style.display="none";K.innerHTML=' <link/><table></table><a href="/a" style="color:red;float:left;opacity:.5;">a</a><select><option>text</option></select><object><param/></object>';var H=K.getElementsByTagName("*"),E=K.getElementsByTagName("a")[0];if(!H||!H.length||!E){return}o.support={leadingWhitespace:K.firstChild.nodeType==3,tbody:!K.getElementsByTagName("tbody").length,objectAll:!!K.getElementsByTagName("object")[0].getElementsByTagName("*").length,htmlSerialize:!!K.getElementsByTagName("link").length,style:/red/.test(E.getAttribute("style")),hrefNormalized:E.getAttribute("href")==="/a",opacity:E.style.opacity==="0.5",cssFloat:!!E.style.cssFloat,scriptEval:false,noCloneEvent:true,boxModel:null};G.type="text/javascript";try{G.appendChild(document.createTextNode("window."+J+"=1;"))}catch(I){}F.insertBefore(G,F.firstChild);if(l[J]){o.support.scriptEval=true;delete l[J]}F.removeChild(G);if(K.attachEvent&&K.fireEvent){K.attachEvent("onclick",function(){o.support.noCloneEvent=false;K.detachEvent("onclick",arguments.callee)});K.cloneNode(true).fireEvent("onclick")}o(function(){var L=document.createElement("div");L.style.width=L.style.paddingLeft="1px";document.body.appendChild(L);o.boxModel=o.support.boxModel=L.offsetWidth===2;document.body.removeChild(L).style.display="none"})})();var w=o.support.cssFloat?"cssFloat":"styleFloat";o.props={"for":"htmlFor","class":"className","float":w,cssFloat:w,styleFloat:w,readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",tabindex:"tabIndex"};o.fn.extend({_load:o.fn.load,load:function(G,J,K){if(typeof G!=="string"){return this._load(G)}var I=G.indexOf(" ");if(I>=0){var E=G.slice(I,G.length);G=G.slice(0,I)}var H="GET";if(J){if(o.isFunction(J)){K=J;J=null}else{if(typeof J==="object"){J=o.param(J);H="POST"}}}var F=this;o.ajax({url:G,type:H,dataType:"html",data:J,complete:function(M,L){if(L=="success"||L=="notmodified"){F.html(E?o("<div/>").append(M.responseText.replace(/<script(.|\s)*?\/script>/g,"")).find(E):M.responseText)}if(K){F.each(K,[M.responseText,L,M])}}});return this},serialize:function(){return o.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?o.makeArray(this.elements):this}).filter(function(){return 
this.name&&!this.disabled&&(this.checked||/select|textarea/i.test(this.nodeName)||/text|hidden|password|search/i.test(this.type))}).map(function(E,F){var G=o(this).val();return G==null?null:o.isArray(G)?o.map(G,function(I,H){return{name:F.name,value:I}}):{name:F.name,value:G}}).get()}});o.each("ajaxStart,ajaxStop,ajaxComplete,ajaxError,ajaxSuccess,ajaxSend".split(","),function(E,F){o.fn[F]=function(G){return this.bind(F,G)}});var r=e();o.extend({get:function(E,G,H,F){if(o.isFunction(G)){H=G;G=null}return o.ajax({type:"GET",url:E,data:G,success:H,dataType:F})},getScript:function(E,F){return o.get(E,null,F,"script")},getJSON:function(E,F,G){return o.get(E,F,G,"json")},post:function(E,G,H,F){if(o.isFunction(G)){H=G;G={}}return o.ajax({type:"POST",url:E,data:G,success:H,dataType:F})},ajaxSetup:function(E){o.extend(o.ajaxSettings,E)},ajaxSettings:{url:location.href,global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:function(){return l.ActiveXObject?new ActiveXObject("Microsoft.XMLHTTP"):new XMLHttpRequest()},accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},ajax:function(M){M=o.extend(true,M,o.extend(true,{},o.ajaxSettings,M));var W,F=/=\?(&|$)/g,R,V,G=M.type.toUpperCase();if(M.data&&M.processData&&typeof M.data!=="string"){M.data=o.param(M.data)}if(M.dataType=="jsonp"){if(G=="GET"){if(!M.url.match(F)){M.url+=(M.url.match(/\?/)?"&":"?")+(M.jsonp||"callback")+"=?"}}else{if(!M.data||!M.data.match(F)){M.data=(M.data?M.data+"&":"")+(M.jsonp||"callback")+"=?"}}M.dataType="json"}if(M.dataType=="json"&&(M.data&&M.data.match(F)||M.url.match(F))){W="jsonp"+r++;if(M.data){M.data=(M.data+"").replace(F,"="+W+"$1")}M.url=M.url.replace(F,"="+W+"$1");M.dataType="script";l[W]=function(X){V=X;I();L();l[W]=g;try{delete l[W]}catch(Y){}if(H){H.removeChild(T)}}}if(M.dataType=="script"&&M.cache==null){M.cache=false}if(M.cache===false&&G=="GET"){var E=e();var U=M.url.replace(/(\?|&)_=.*?(&|$)/,"$1_="+E+"$2");M.url=U+((U==M.url)?(M.url.match(/\?/)?"&":"?")+"_="+E:"")}if(M.data&&G=="GET"){M.url+=(M.url.match(/\?/)?"&":"?")+M.data;M.data=null}if(M.global&&!o.active++){o.event.trigger("ajaxStart")}var Q=/^(\w+:)?\/\/([^\/?#]+)/.exec(M.url);if(M.dataType=="script"&&G=="GET"&&Q&&(Q[1]&&Q[1]!=location.protocol||Q[2]!=location.host)){var H=document.getElementsByTagName("head")[0];var T=document.createElement("script");T.src=M.url;if(M.scriptCharset){T.charset=M.scriptCharset}if(!W){var O=false;T.onload=T.onreadystatechange=function(){if(!O&&(!this.readyState||this.readyState=="loaded"||this.readyState=="complete")){O=true;I();L();T.onload=T.onreadystatechange=null;H.removeChild(T)}}}H.appendChild(T);return g}var K=false;var J=M.xhr();if(M.username){J.open(G,M.url,M.async,M.username,M.password)}else{J.open(G,M.url,M.async)}try{if(M.data){J.setRequestHeader("Content-Type",M.contentType)}if(M.ifModified){J.setRequestHeader("If-Modified-Since",o.lastModified[M.url]||"Thu, 01 Jan 1970 00:00:00 GMT")}J.setRequestHeader("X-Requested-With","XMLHttpRequest");J.setRequestHeader("Accept",M.dataType&&M.accepts[M.dataType]?M.accepts[M.dataType]+", */*":M.accepts._default)}catch(S){}if(M.beforeSend&&M.beforeSend(J,M)===false){if(M.global&&!--o.active){o.event.trigger("ajaxStop")}J.abort();return false}if(M.global){o.event.trigger("ajaxSend",[J,M])}var 
N=function(X){if(J.readyState==0){if(P){clearInterval(P);P=null;if(M.global&&!--o.active){o.event.trigger("ajaxStop")}}}else{if(!K&&J&&(J.readyState==4||X=="timeout")){K=true;if(P){clearInterval(P);P=null}R=X=="timeout"?"timeout":!o.httpSuccess(J)?"error":M.ifModified&&o.httpNotModified(J,M.url)?"notmodified":"success";if(R=="success"){try{V=o.httpData(J,M.dataType,M)}catch(Z){R="parsererror"}}if(R=="success"){var Y;try{Y=J.getResponseHeader("Last-Modified")}catch(Z){}if(M.ifModified&&Y){o.lastModified[M.url]=Y}if(!W){I()}}else{o.handleError(M,J,R)}L();if(X){J.abort()}if(M.async){J=null}}}};if(M.async){var P=setInterval(N,13);if(M.timeout>0){setTimeout(function(){if(J&&!K){N("timeout")}},M.timeout)}}try{J.send(M.data)}catch(S){o.handleError(M,J,null,S)}if(!M.async){N()}function I(){if(M.success){M.success(V,R)}if(M.global){o.event.trigger("ajaxSuccess",[J,M])}}function L(){if(M.complete){M.complete(J,R)}if(M.global){o.event.trigger("ajaxComplete",[J,M])}if(M.global&&!--o.active){o.event.trigger("ajaxStop")}}return J},handleError:function(F,H,E,G){if(F.error){F.error(H,E,G)}if(F.global){o.event.trigger("ajaxError",[H,F,G])}},active:0,httpSuccess:function(F){try{return !F.status&&location.protocol=="file:"||(F.status>=200&&F.status<300)||F.status==304||F.status==1223}catch(E){}return false},httpNotModified:function(G,E){try{var H=G.getResponseHeader("Last-Modified");return G.status==304||H==o.lastModified[E]}catch(F){}return false},httpData:function(J,H,G){var F=J.getResponseHeader("content-type"),E=H=="xml"||!H&&F&&F.indexOf("xml")>=0,I=E?J.responseXML:J.responseText;if(E&&I.documentElement.tagName=="parsererror"){throw"parsererror"}if(G&&G.dataFilter){I=G.dataFilter(I,H)}if(typeof I==="string"){if(H=="script"){o.globalEval(I)}if(H=="json"){I=l["eval"]("("+I+")")}}return I},param:function(E){var G=[];function H(I,J){G[G.length]=encodeURIComponent(I)+"="+encodeURIComponent(J)}if(o.isArray(E)||E.jquery){o.each(E,function(){H(this.name,this.value)})}else{for(var F in E){if(o.isArray(E[F])){o.each(E[F],function(){H(F,this)})}else{H(F,o.isFunction(E[F])?E[F]():E[F])}}}return G.join("&").replace(/%20/g,"+")}});var m={},n,d=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];function t(F,E){var G={};o.each(d.concat.apply([],d.slice(0,E)),function(){G[this]=F});return G}o.fn.extend({show:function(J,L){if(J){return this.animate(t("show",3),J,L)}else{for(var H=0,F=this.length;H<F;H++){var E=o.data(this[H],"olddisplay");this[H].style.display=E||"";if(o.css(this[H],"display")==="none"){var G=this[H].tagName,K;if(m[G]){K=m[G]}else{var I=o("<"+G+" />").appendTo("body");K=I.css("display");if(K==="none"){K="block"}I.remove();m[G]=K}o.data(this[H],"olddisplay",K)}}for(var H=0,F=this.length;H<F;H++){this[H].style.display=o.data(this[H],"olddisplay")||""}return this}},hide:function(H,I){if(H){return this.animate(t("hide",3),H,I)}else{for(var G=0,F=this.length;G<F;G++){var E=o.data(this[G],"olddisplay");if(!E&&E!=="none"){o.data(this[G],"olddisplay",o.css(this[G],"display"))}}for(var G=0,F=this.length;G<F;G++){this[G].style.display="none"}return this}},_toggle:o.fn.toggle,toggle:function(G,F){var E=typeof G==="boolean";return o.isFunction(G)&&o.isFunction(F)?this._toggle.apply(this,arguments):G==null||E?this.each(function(){var H=E?G:o(this).is(":hidden");o(this)[H?"show":"hide"]()}):this.animate(t("toggle",3),G,F)},fadeTo:function(E,G,F){return this.animate({opacity:G},E,F)},animate:function(I,F,H,G){var 
E=o.speed(F,H,G);return this[E.queue===false?"each":"queue"](function(){var K=o.extend({},E),M,L=this.nodeType==1&&o(this).is(":hidden"),J=this;for(M in I){if(I[M]=="hide"&&L||I[M]=="show"&&!L){return K.complete.call(this)}if((M=="height"||M=="width")&&this.style){K.display=o.css(this,"display");K.overflow=this.style.overflow}}if(K.overflow!=null){this.style.overflow="hidden"}K.curAnim=o.extend({},I);o.each(I,function(O,S){var R=new o.fx(J,K,O);if(/toggle|show|hide/.test(S)){R[S=="toggle"?L?"show":"hide":S](I)}else{var Q=S.toString().match(/^([+-]=)?([\d+-.]+)(.*)$/),T=R.cur(true)||0;if(Q){var N=parseFloat(Q[2]),P=Q[3]||"px";if(P!="px"){J.style[O]=(N||1)+P;T=((N||1)/R.cur(true))*T;J.style[O]=T+P}if(Q[1]){N=((Q[1]=="-="?-1:1)*N)+T}R.custom(T,N,P)}else{R.custom(T,S,"")}}});return true})},stop:function(F,E){var G=o.timers;if(F){this.queue([])}this.each(function(){for(var H=G.length-1;H>=0;H--){if(G[H].elem==this){if(E){G[H](true)}G.splice(H,1)}}});if(!E){this.dequeue()}return this}});o.each({slideDown:t("show",1),slideUp:t("hide",1),slideToggle:t("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(E,F){o.fn[E]=function(G,H){return this.animate(F,G,H)}});o.extend({speed:function(G,H,F){var E=typeof G==="object"?G:{complete:F||!F&&H||o.isFunction(G)&&G,duration:G,easing:F&&H||H&&!o.isFunction(H)&&H};E.duration=o.fx.off?0:typeof E.duration==="number"?E.duration:o.fx.speeds[E.duration]||o.fx.speeds._default;E.old=E.complete;E.complete=function(){if(E.queue!==false){o(this).dequeue()}if(o.isFunction(E.old)){E.old.call(this)}};return E},easing:{linear:function(G,H,E,F){return E+F*G},swing:function(G,H,E,F){return((-Math.cos(G*Math.PI)/2)+0.5)*F+E}},timers:[],fx:function(F,E,G){this.options=E;this.elem=F;this.prop=G;if(!E.orig){E.orig={}}}});o.fx.prototype={update:function(){if(this.options.step){this.options.step.call(this.elem,this.now,this)}(o.fx.step[this.prop]||o.fx.step._default)(this);if((this.prop=="height"||this.prop=="width")&&this.elem.style){this.elem.style.display="block"}},cur:function(F){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null)){return this.elem[this.prop]}var E=parseFloat(o.css(this.elem,this.prop,F));return E&&E>-10000?E:parseFloat(o.curCSS(this.elem,this.prop))||0},custom:function(I,H,G){this.startTime=e();this.start=I;this.end=H;this.unit=G||this.unit||"px";this.now=this.start;this.pos=this.state=0;var E=this;function F(J){return E.step(J)}F.elem=this.elem;if(F()&&o.timers.push(F)&&!n){n=setInterval(function(){var K=o.timers;for(var J=0;J<K.length;J++){if(!K[J]()){K.splice(J--,1)}}if(!K.length){clearInterval(n);n=g}},13)}},show:function(){this.options.orig[this.prop]=o.attr(this.elem.style,this.prop);this.options.show=true;this.custom(this.prop=="width"||this.prop=="height"?1:0,this.cur());o(this.elem).show()},hide:function(){this.options.orig[this.prop]=o.attr(this.elem.style,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(H){var G=e();if(H||G>=this.options.duration+this.startTime){this.now=this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;var E=true;for(var F in this.options.curAnim){if(this.options.curAnim[F]!==true){E=false}}if(E){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;this.elem.style.display=this.options.display;if(o.css(this.elem,"display")=="none"){this.elem.style.display="block"}}if(this.options.hide){o(this.elem).hide()}if(this.options.hide||this.options.show){for(var I in 
this.options.curAnim){o.attr(this.elem.style,I,this.options.orig[I])}}this.options.complete.call(this.elem)}return false}else{var J=G-this.startTime;this.state=J/this.options.duration;this.pos=o.easing[this.options.easing||(o.easing.swing?"swing":"linear")](this.state,J,0,1,this.options.duration);this.now=this.start+((this.end-this.start)*this.pos);this.update()}return true}};o.extend(o.fx,{speeds:{slow:600,fast:200,_default:400},step:{opacity:function(E){o.attr(E.elem.style,"opacity",E.now)},_default:function(E){if(E.elem.style&&E.elem.style[E.prop]!=null){E.elem.style[E.prop]=E.now+E.unit}else{E.elem[E.prop]=E.now}}}});if(document.documentElement.getBoundingClientRect){o.fn.offset=function(){if(!this[0]){return{top:0,left:0}}if(this[0]===this[0].ownerDocument.body){return o.offset.bodyOffset(this[0])}var G=this[0].getBoundingClientRect(),J=this[0].ownerDocument,F=J.body,E=J.documentElement,L=E.clientTop||F.clientTop||0,K=E.clientLeft||F.clientLeft||0,I=G.top+(self.pageYOffset||o.boxModel&&E.scrollTop||F.scrollTop)-L,H=G.left+(self.pageXOffset||o.boxModel&&E.scrollLeft||F.scrollLeft)-K;return{top:I,left:H}}}else{o.fn.offset=function(){if(!this[0]){return{top:0,left:0}}if(this[0]===this[0].ownerDocument.body){return o.offset.bodyOffset(this[0])}o.offset.initialized||o.offset.initialize();var J=this[0],G=J.offsetParent,F=J,O=J.ownerDocument,M,H=O.documentElement,K=O.body,L=O.defaultView,E=L.getComputedStyle(J,null),N=J.offsetTop,I=J.offsetLeft;while((J=J.parentNode)&&J!==K&&J!==H){M=L.getComputedStyle(J,null);N-=J.scrollTop,I-=J.scrollLeft;if(J===G){N+=J.offsetTop,I+=J.offsetLeft;if(o.offset.doesNotAddBorder&&!(o.offset.doesAddBorderForTableAndCells&&/^t(able|d|h)$/i.test(J.tagName))){N+=parseInt(M.borderTopWidth,10)||0,I+=parseInt(M.borderLeftWidth,10)||0}F=G,G=J.offsetParent}if(o.offset.subtractsBorderForOverflowNotVisible&&M.overflow!=="visible"){N+=parseInt(M.borderTopWidth,10)||0,I+=parseInt(M.borderLeftWidth,10)||0}E=M}if(E.position==="relative"||E.position==="static"){N+=K.offsetTop,I+=K.offsetLeft}if(E.position==="fixed"){N+=Math.max(H.scrollTop,K.scrollTop),I+=Math.max(H.scrollLeft,K.scrollLeft)}return{top:N,left:I}}}o.offset={initialize:function(){if(this.initialized){return}var L=document.body,F=document.createElement("div"),H,G,N,I,M,E,J=L.style.marginTop,K='<div style="position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;"><div></div></div><table style="position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;" cellpadding="0" cellspacing="0"><tr><td></td></tr></table>';M={position:"absolute",top:0,left:0,margin:0,border:0,width:"1px",height:"1px",visibility:"hidden"};for(E in M){F.style[E]=M[E]}F.innerHTML=K;L.insertBefore(F,L.firstChild);H=F.firstChild,G=H.firstChild,I=H.nextSibling.firstChild.firstChild;this.doesNotAddBorder=(G.offsetTop!==5);this.doesAddBorderForTableAndCells=(I.offsetTop===5);H.style.overflow="hidden",H.style.position="relative";this.subtractsBorderForOverflowNotVisible=(G.offsetTop===-5);L.style.marginTop="1px";this.doesNotIncludeMarginInBodyOffset=(L.offsetTop===0);L.style.marginTop=J;L.removeChild(F);this.initialized=true},bodyOffset:function(E){o.offset.initialized||o.offset.initialize();var G=E.offsetTop,F=E.offsetLeft;if(o.offset.doesNotIncludeMarginInBodyOffset){G+=parseInt(o.curCSS(E,"marginTop",true),10)||0,F+=parseInt(o.curCSS(E,"marginLeft",true),10)||0}return{top:G,left:F}}};o.fn.extend({position:function(){var I=0,H=0,F;if(this[0]){var 
G=this.offsetParent(),J=this.offset(),E=/^body|html$/i.test(G[0].tagName)?{top:0,left:0}:G.offset();J.top-=j(this,"marginTop");J.left-=j(this,"marginLeft");E.top+=j(G,"borderTopWidth");E.left+=j(G,"borderLeftWidth");F={top:J.top-E.top,left:J.left-E.left}}return F},offsetParent:function(){var E=this[0].offsetParent||document.body;while(E&&(!/^body|html$/i.test(E.tagName)&&o.css(E,"position")=="static")){E=E.offsetParent}return o(E)}});o.each(["Left","Top"],function(F,E){var G="scroll"+E;o.fn[G]=function(H){if(!this[0]){return null}return H!==g?this.each(function(){this==l||this==document?l.scrollTo(!F?H:o(l).scrollLeft(),F?H:o(l).scrollTop()):this[G]=H}):this[0]==l||this[0]==document?self[F?"pageYOffset":"pageXOffset"]||o.boxModel&&document.documentElement[G]||document.body[G]:this[0][G]}});o.each(["Height","Width"],function(I,G){var E=I?"Left":"Top",H=I?"Right":"Bottom",F=G.toLowerCase();o.fn["inner"+G]=function(){return this[0]?o.css(this[0],F,false,"padding"):null};o.fn["outer"+G]=function(K){return this[0]?o.css(this[0],F,false,K?"margin":"border"):null};var J=G.toLowerCase();o.fn[J]=function(K){return this[0]==l?document.compatMode=="CSS1Compat"&&document.documentElement["client"+G]||document.body["client"+G]:this[0]==document?Math.max(document.documentElement["client"+G],document.body["scroll"+G],document.documentElement["scroll"+G],document.body["offset"+G],document.documentElement["offset"+G]):K===g?(this.length?o.css(this[0],J):null):this.css(J,typeof K==="string"?K:K+"px")}})})(); \ No newline at end of file
+(function(A,w){function ma(){if(!c.isReady){try{s.documentElement.doScroll("left")}catch(a){setTimeout(ma,1);return}c.ready()}}function Qa(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)}function X(a,b,d,f,e,j){var i=a.length;if(typeof b==="object"){for(var o in b)X(a,o,b[o],f,e,d);return a}if(d!==w){f=!j&&f&&c.isFunction(d);for(o=0;o<i;o++)e(a[o],b,f?d.call(a[o],o,e(a[o],b)):d,j);return a}return i?
+e(a[0],b):w}function J(){return(new Date).getTime()}function Y(){return false}function Z(){return true}function na(a,b,d){d[0].type=a;return c.event.handle.apply(b,d)}function oa(a){var b,d=[],f=[],e=arguments,j,i,o,k,n,r;i=c.data(this,"events");if(!(a.liveFired===this||!i||!i.live||a.button&&a.type==="click")){a.liveFired=this;var u=i.live.slice(0);for(k=0;k<u.length;k++){i=u[k];i.origType.replace(O,"")===a.type?f.push(i.selector):u.splice(k--,1)}j=c(a.target).closest(f,a.currentTarget);n=0;for(r=
+j.length;n<r;n++)for(k=0;k<u.length;k++){i=u[k];if(j[n].selector===i.selector){o=j[n].elem;f=null;if(i.preType==="mouseenter"||i.preType==="mouseleave")f=c(a.relatedTarget).closest(i.selector)[0];if(!f||f!==o)d.push({elem:o,handleObj:i})}}n=0;for(r=d.length;n<r;n++){j=d[n];a.currentTarget=j.elem;a.data=j.handleObj.data;a.handleObj=j.handleObj;if(j.handleObj.origHandler.apply(j.elem,e)===false){b=false;break}}return b}}function pa(a,b){return"live."+(a&&a!=="*"?a+".":"")+b.replace(/\./g,"`").replace(/ /g,
+"&")}function qa(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function ra(a,b){var d=0;b.each(function(){if(this.nodeName===(a[d]&&a[d].nodeName)){var f=c.data(a[d++]),e=c.data(this,f);if(f=f&&f.events){delete e.handle;e.events={};for(var j in f)for(var i in f[j])c.event.add(this,j,f[j][i],f[j][i].data)}}})}function sa(a,b,d){var f,e,j;b=b&&b[0]?b[0].ownerDocument||b[0]:s;if(a.length===1&&typeof a[0]==="string"&&a[0].length<512&&b===s&&!ta.test(a[0])&&(c.support.checkClone||!ua.test(a[0]))){e=
+true;if(j=c.fragments[a[0]])if(j!==1)f=j}if(!f){f=b.createDocumentFragment();c.clean(a,b,f,d)}if(e)c.fragments[a[0]]=j?f:1;return{fragment:f,cacheable:e}}function K(a,b){var d={};c.each(va.concat.apply([],va.slice(0,b)),function(){d[this]=a});return d}function wa(a){return"scrollTo"in a&&a.document?a:a.nodeType===9?a.defaultView||a.parentWindow:false}var c=function(a,b){return new c.fn.init(a,b)},Ra=A.jQuery,Sa=A.$,s=A.document,T,Ta=/^[^<]*(<[\w\W]+>)[^>]*$|^#([\w-]+)$/,Ua=/^.[^:#\[\.,]*$/,Va=/\S/,
+Wa=/^(\s|\u00A0)+|(\s|\u00A0)+$/g,Xa=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,P=navigator.userAgent,xa=false,Q=[],L,$=Object.prototype.toString,aa=Object.prototype.hasOwnProperty,ba=Array.prototype.push,R=Array.prototype.slice,ya=Array.prototype.indexOf;c.fn=c.prototype={init:function(a,b){var d,f;if(!a)return this;if(a.nodeType){this.context=this[0]=a;this.length=1;return this}if(a==="body"&&!b){this.context=s;this[0]=s.body;this.selector="body";this.length=1;return this}if(typeof a==="string")if((d=Ta.exec(a))&&
+(d[1]||!b))if(d[1]){f=b?b.ownerDocument||b:s;if(a=Xa.exec(a))if(c.isPlainObject(b)){a=[s.createElement(a[1])];c.fn.attr.call(a,b,true)}else a=[f.createElement(a[1])];else{a=sa([d[1]],[f]);a=(a.cacheable?a.fragment.cloneNode(true):a.fragment).childNodes}return c.merge(this,a)}else{if(b=s.getElementById(d[2])){if(b.id!==d[2])return T.find(a);this.length=1;this[0]=b}this.context=s;this.selector=a;return this}else if(!b&&/^\w+$/.test(a)){this.selector=a;this.context=s;a=s.getElementsByTagName(a);return c.merge(this,
+a)}else return!b||b.jquery?(b||T).find(a):c(b).find(a);else if(c.isFunction(a))return T.ready(a);if(a.selector!==w){this.selector=a.selector;this.context=a.context}return c.makeArray(a,this)},selector:"",jquery:"1.4.2",length:0,size:function(){return this.length},toArray:function(){return R.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this.slice(a)[0]:this[a]},pushStack:function(a,b,d){var f=c();c.isArray(a)?ba.apply(f,a):c.merge(f,a);f.prevObject=this;f.context=this.context;if(b===
+"find")f.selector=this.selector+(this.selector?" ":"")+d;else if(b)f.selector=this.selector+"."+b+"("+d+")";return f},each:function(a,b){return c.each(this,a,b)},ready:function(a){c.bindReady();if(c.isReady)a.call(s,c);else Q&&Q.push(a);return this},eq:function(a){return a===-1?this.slice(a):this.slice(a,+a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(R.apply(this,arguments),"slice",R.call(arguments).join(","))},map:function(a){return this.pushStack(c.map(this,
+function(b,d){return a.call(b,d,b)}))},end:function(){return this.prevObject||c(null)},push:ba,sort:[].sort,splice:[].splice};c.fn.init.prototype=c.fn;c.extend=c.fn.extend=function(){var a=arguments[0]||{},b=1,d=arguments.length,f=false,e,j,i,o;if(typeof a==="boolean"){f=a;a=arguments[1]||{};b=2}if(typeof a!=="object"&&!c.isFunction(a))a={};if(d===b){a=this;--b}for(;b<d;b++)if((e=arguments[b])!=null)for(j in e){i=a[j];o=e[j];if(a!==o)if(f&&o&&(c.isPlainObject(o)||c.isArray(o))){i=i&&(c.isPlainObject(i)||
+c.isArray(i))?i:c.isArray(o)?[]:{};a[j]=c.extend(f,i,o)}else if(o!==w)a[j]=o}return a};c.extend({noConflict:function(a){A.$=Sa;if(a)A.jQuery=Ra;return c},isReady:false,ready:function(){if(!c.isReady){if(!s.body)return setTimeout(c.ready,13);c.isReady=true;if(Q){for(var a,b=0;a=Q[b++];)a.call(s,c);Q=null}c.fn.triggerHandler&&c(s).triggerHandler("ready")}},bindReady:function(){if(!xa){xa=true;if(s.readyState==="complete")return c.ready();if(s.addEventListener){s.addEventListener("DOMContentLoaded",
+L,false);A.addEventListener("load",c.ready,false)}else if(s.attachEvent){s.attachEvent("onreadystatechange",L);A.attachEvent("onload",c.ready);var a=false;try{a=A.frameElement==null}catch(b){}s.documentElement.doScroll&&a&&ma()}}},isFunction:function(a){return $.call(a)==="[object Function]"},isArray:function(a){return $.call(a)==="[object Array]"},isPlainObject:function(a){if(!a||$.call(a)!=="[object Object]"||a.nodeType||a.setInterval)return false;if(a.constructor&&!aa.call(a,"constructor")&&!aa.call(a.constructor.prototype,
+"isPrototypeOf"))return false;var b;for(b in a);return b===w||aa.call(a,b)},isEmptyObject:function(a){for(var b in a)return false;return true},error:function(a){throw a;},parseJSON:function(a){if(typeof a!=="string"||!a)return null;a=c.trim(a);if(/^[\],:{}\s]*$/.test(a.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,"@").replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,"]").replace(/(?:^|:|,)(?:\s*\[)+/g,"")))return A.JSON&&A.JSON.parse?A.JSON.parse(a):(new Function("return "+
+a))();else c.error("Invalid JSON: "+a)},noop:function(){},globalEval:function(a){if(a&&Va.test(a)){var b=s.getElementsByTagName("head")[0]||s.documentElement,d=s.createElement("script");d.type="text/javascript";if(c.support.scriptEval)d.appendChild(s.createTextNode(a));else d.text=a;b.insertBefore(d,b.firstChild);b.removeChild(d)}},nodeName:function(a,b){return a.nodeName&&a.nodeName.toUpperCase()===b.toUpperCase()},each:function(a,b,d){var f,e=0,j=a.length,i=j===w||c.isFunction(a);if(d)if(i)for(f in a){if(b.apply(a[f],
+d)===false)break}else for(;e<j;){if(b.apply(a[e++],d)===false)break}else if(i)for(f in a){if(b.call(a[f],f,a[f])===false)break}else for(d=a[0];e<j&&b.call(d,e,d)!==false;d=a[++e]);return a},trim:function(a){return(a||"").replace(Wa,"")},makeArray:function(a,b){b=b||[];if(a!=null)a.length==null||typeof a==="string"||c.isFunction(a)||typeof a!=="function"&&a.setInterval?ba.call(b,a):c.merge(b,a);return b},inArray:function(a,b){if(b.indexOf)return b.indexOf(a);for(var d=0,f=b.length;d<f;d++)if(b[d]===
+a)return d;return-1},merge:function(a,b){var d=a.length,f=0;if(typeof b.length==="number")for(var e=b.length;f<e;f++)a[d++]=b[f];else for(;b[f]!==w;)a[d++]=b[f++];a.length=d;return a},grep:function(a,b,d){for(var f=[],e=0,j=a.length;e<j;e++)!d!==!b(a[e],e)&&f.push(a[e]);return f},map:function(a,b,d){for(var f=[],e,j=0,i=a.length;j<i;j++){e=b(a[j],j,d);if(e!=null)f[f.length]=e}return f.concat.apply([],f)},guid:1,proxy:function(a,b,d){if(arguments.length===2)if(typeof b==="string"){d=a;a=d[b];b=w}else if(b&&
+!c.isFunction(b)){d=b;b=w}if(!b&&a)b=function(){return a.apply(d||this,arguments)};if(a)b.guid=a.guid=a.guid||b.guid||c.guid++;return b},uaMatch:function(a){a=a.toLowerCase();a=/(webkit)[ \/]([\w.]+)/.exec(a)||/(opera)(?:.*version)?[ \/]([\w.]+)/.exec(a)||/(msie) ([\w.]+)/.exec(a)||!/compatible/.test(a)&&/(mozilla)(?:.*? rv:([\w.]+))?/.exec(a)||[];return{browser:a[1]||"",version:a[2]||"0"}},browser:{}});P=c.uaMatch(P);if(P.browser){c.browser[P.browser]=true;c.browser.version=P.version}if(c.browser.webkit)c.browser.safari=
+true;if(ya)c.inArray=function(a,b){return ya.call(b,a)};T=c(s);if(s.addEventListener)L=function(){s.removeEventListener("DOMContentLoaded",L,false);c.ready()};else if(s.attachEvent)L=function(){if(s.readyState==="complete"){s.detachEvent("onreadystatechange",L);c.ready()}};(function(){c.support={};var a=s.documentElement,b=s.createElement("script"),d=s.createElement("div"),f="script"+J();d.style.display="none";d.innerHTML=" <link/><table></table><a href='/a' style='color:red;float:left;opacity:.55;'>a</a><input type='checkbox'/>";
+var e=d.getElementsByTagName("*"),j=d.getElementsByTagName("a")[0];if(!(!e||!e.length||!j)){c.support={leadingWhitespace:d.firstChild.nodeType===3,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/red/.test(j.getAttribute("style")),hrefNormalized:j.getAttribute("href")==="/a",opacity:/^0.55$/.test(j.style.opacity),cssFloat:!!j.style.cssFloat,checkOn:d.getElementsByTagName("input")[0].value==="on",optSelected:s.createElement("select").appendChild(s.createElement("option")).selected,
+parentNode:d.removeChild(d.appendChild(s.createElement("div"))).parentNode===null,deleteExpando:true,checkClone:false,scriptEval:false,noCloneEvent:true,boxModel:null};b.type="text/javascript";try{b.appendChild(s.createTextNode("window."+f+"=1;"))}catch(i){}a.insertBefore(b,a.firstChild);if(A[f]){c.support.scriptEval=true;delete A[f]}try{delete b.test}catch(o){c.support.deleteExpando=false}a.removeChild(b);if(d.attachEvent&&d.fireEvent){d.attachEvent("onclick",function k(){c.support.noCloneEvent=
+false;d.detachEvent("onclick",k)});d.cloneNode(true).fireEvent("onclick")}d=s.createElement("div");d.innerHTML="<input type='radio' name='radiotest' checked='checked'/>";a=s.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var k=s.createElement("div");k.style.width=k.style.paddingLeft="1px";s.body.appendChild(k);c.boxModel=c.support.boxModel=k.offsetWidth===2;s.body.removeChild(k).style.display="none"});a=function(k){var n=
+s.createElement("div");k="on"+k;var r=k in n;if(!r){n.setAttribute(k,"return;");r=typeof n[k]==="function"}return r};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=e=j=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var G="jQuery"+J(),Ya=0,za={};c.extend({cache:{},expando:G,noData:{embed:true,object:true,
+applet:true},data:function(a,b,d){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var f=a[G],e=c.cache;if(!f&&typeof b==="string"&&d===w)return null;f||(f=++Ya);if(typeof b==="object"){a[G]=f;e[f]=c.extend(true,{},b)}else if(!e[f]){a[G]=f;e[f]={}}a=e[f];if(d!==w)a[b]=d;return typeof b==="string"?a[b]:a}},removeData:function(a,b){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var d=a[G],f=c.cache,e=f[d];if(b){if(e){delete e[b];c.isEmptyObject(e)&&c.removeData(a)}}else{if(c.support.deleteExpando)delete a[c.expando];
+else a.removeAttribute&&a.removeAttribute(c.expando);delete f[d]}}}});c.fn.extend({data:function(a,b){if(typeof a==="undefined"&&this.length)return c.data(this[0]);else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===w){var f=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(f===w&&this.length)f=c.data(this[0],a);return f===w&&d[1]?this.data(d[0]):f}else return this.trigger("setData"+d[1]+"!",[d[0],b]).each(function(){c.data(this,
+a,b)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var f=c.data(a,b);if(!d)return f||[];if(!f||c.isArray(d))f=c.data(a,b,c.makeArray(d));else f.push(d);return f}},dequeue:function(a,b){b=b||"fx";var d=c.queue(a,b),f=d.shift();if(f==="inprogress")f=d.shift();if(f){b==="fx"&&d.unshift("inprogress");f.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b===
+w)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this,a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var Aa=/[\n\t]/g,ca=/\s+/,Za=/\r/g,$a=/href|src|style/,ab=/(button|input)/i,bb=/(button|input|object|select|textarea)/i,
+cb=/^(a|area)$/i,Ba=/radio|checkbox/;c.fn.extend({attr:function(a,b){return X(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this,a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(n){var r=c(this);r.addClass(a.call(this,n,r.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1)if(e.className){for(var j=" "+e.className+" ",
+i=e.className,o=0,k=b.length;o<k;o++)if(j.indexOf(" "+b[o]+" ")<0)i+=" "+b[o];e.className=c.trim(i)}else e.className=a}return this},removeClass:function(a){if(c.isFunction(a))return this.each(function(k){var n=c(this);n.removeClass(a.call(this,k,n.attr("class")))});if(a&&typeof a==="string"||a===w)for(var b=(a||"").split(ca),d=0,f=this.length;d<f;d++){var e=this[d];if(e.nodeType===1&&e.className)if(a){for(var j=(" "+e.className+" ").replace(Aa," "),i=0,o=b.length;i<o;i++)j=j.replace(" "+b[i]+" ",
+" ");e.className=c.trim(j)}else e.className=""}return this},toggleClass:function(a,b){var d=typeof a,f=typeof b==="boolean";if(c.isFunction(a))return this.each(function(e){var j=c(this);j.toggleClass(a.call(this,e,j.attr("class"),b),b)});return this.each(function(){if(d==="string")for(var e,j=0,i=c(this),o=b,k=a.split(ca);e=k[j++];){o=f?o:!i.hasClass(e);i[o?"addClass":"removeClass"](e)}else if(d==="undefined"||d==="boolean"){this.className&&c.data(this,"__className__",this.className);this.className=
+this.className||a===false?"":c.data(this,"__className__")||""}})},hasClass:function(a){a=" "+a+" ";for(var b=0,d=this.length;b<d;b++)if((" "+this[b].className+" ").replace(Aa," ").indexOf(a)>-1)return true;return false},val:function(a){if(a===w){var b=this[0];if(b){if(c.nodeName(b,"option"))return(b.attributes.value||{}).specified?b.value:b.text;if(c.nodeName(b,"select")){var d=b.selectedIndex,f=[],e=b.options;b=b.type==="select-one";if(d<0)return null;var j=b?d:0;for(d=b?d+1:e.length;j<d;j++){var i=
+e[j];if(i.selected){a=c(i).val();if(b)return a;f.push(a)}}return f}if(Ba.test(b.type)&&!c.support.checkOn)return b.getAttribute("value")===null?"on":b.value;return(b.value||"").replace(Za,"")}return w}var o=c.isFunction(a);return this.each(function(k){var n=c(this),r=a;if(this.nodeType===1){if(o)r=a.call(this,k,n.val());if(typeof r==="number")r+="";if(c.isArray(r)&&Ba.test(this.type))this.checked=c.inArray(n.val(),r)>=0;else if(c.nodeName(this,"select")){var u=c.makeArray(r);c("option",this).each(function(){this.selected=
+c.inArray(c(this).val(),u)>=0});if(!u.length)this.selectedIndex=-1}else this.value=r}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,f){if(!a||a.nodeType===3||a.nodeType===8)return w;if(f&&b in c.attrFn)return c(a)[b](d);f=a.nodeType!==1||!c.isXMLDoc(a);var e=d!==w;b=f&&c.props[b]||b;if(a.nodeType===1){var j=$a.test(b);if(b in a&&f&&!j){if(e){b==="type"&&ab.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed");
+a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:bb.test(a.nodeName)||cb.test(a.nodeName)&&a.href?0:w;return a[b]}if(!c.support.style&&f&&b==="style"){if(e)a.style.cssText=""+d;return a.style.cssText}e&&a.setAttribute(b,""+d);a=!c.support.hrefNormalized&&f&&j?a.getAttribute(b,2):a.getAttribute(b);return a===null?w:a}return c.style(a,b,d)}});var O=/\.(.*)$/,db=function(a){return a.replace(/[^\w\s\.\|`]/g,
+function(b){return"\\"+b})};c.event={add:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){if(a.setInterval&&a!==A&&!a.frameElement)a=A;var e,j;if(d.handler){e=d;d=e.handler}if(!d.guid)d.guid=c.guid++;if(j=c.data(a)){var i=j.events=j.events||{},o=j.handle;if(!o)j.handle=o=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(o.elem,arguments):w};o.elem=a;b=b.split(" ");for(var k,n=0,r;k=b[n++];){j=e?c.extend({},e):{handler:d,data:f};if(k.indexOf(".")>-1){r=k.split(".");
+k=r.shift();j.namespace=r.slice(0).sort().join(".")}else{r=[];j.namespace=""}j.type=k;j.guid=d.guid;var u=i[k],z=c.event.special[k]||{};if(!u){u=i[k]=[];if(!z.setup||z.setup.call(a,f,r,o)===false)if(a.addEventListener)a.addEventListener(k,o,false);else a.attachEvent&&a.attachEvent("on"+k,o)}if(z.add){z.add.call(a,j);if(!j.handler.guid)j.handler.guid=d.guid}u.push(j);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){var e,j=0,i,o,k,n,r,u,z=c.data(a),
+C=z&&z.events;if(z&&C){if(b&&b.type){d=b.handler;b=b.type}if(!b||typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(e in C)c.event.remove(a,e+b)}else{for(b=b.split(" ");e=b[j++];){n=e;i=e.indexOf(".")<0;o=[];if(!i){o=e.split(".");e=o.shift();k=new RegExp("(^|\\.)"+c.map(o.slice(0).sort(),db).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(r=C[e])if(d){n=c.event.special[e]||{};for(B=f||0;B<r.length;B++){u=r[B];if(d.guid===u.guid){if(i||k.test(u.namespace)){f==null&&r.splice(B--,1);n.remove&&n.remove.call(a,u)}if(f!=
+null)break}}if(r.length===0||f!=null&&r.length===1){if(!n.teardown||n.teardown.call(a,o)===false)Ca(a,e,z.handle);delete C[e]}}else for(var B=0;B<r.length;B++){u=r[B];if(i||k.test(u.namespace)){c.event.remove(a,n,u.handler,B);r.splice(B--,1)}}}if(c.isEmptyObject(C)){if(b=z.handle)b.elem=null;delete z.events;delete z.handle;c.isEmptyObject(z)&&c.removeData(a)}}}}},trigger:function(a,b,d,f){var e=a.type||a;if(!f){a=typeof a==="object"?a[G]?a:c.extend(c.Event(e),a):c.Event(e);if(e.indexOf("!")>=0){a.type=
+e=e.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[e]&&c.each(c.cache,function(){this.events&&this.events[e]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return w;a.result=w;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(f=c.data(d,"handle"))&&f.apply(d,b);f=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+e]&&d["on"+e].apply(d,b)===false)a.result=false}catch(j){}if(!a.isPropagationStopped()&&
+f)c.event.trigger(a,b,f,true);else if(!a.isDefaultPrevented()){f=a.target;var i,o=c.nodeName(f,"a")&&e==="click",k=c.event.special[e]||{};if((!k._default||k._default.call(d,a)===false)&&!o&&!(f&&f.nodeName&&c.noData[f.nodeName.toLowerCase()])){try{if(f[e]){if(i=f["on"+e])f["on"+e]=null;c.event.triggered=true;f[e]()}}catch(n){}if(i)f["on"+e]=i;c.event.triggered=false}}},handle:function(a){var b,d,f,e;a=arguments[0]=c.event.fix(a||A.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive;
+if(!b){d=a.type.split(".");a.type=d.shift();f=new RegExp("(^|\\.)"+d.slice(0).sort().join("\\.(?:.*\\.)?")+"(\\.|$)")}e=c.data(this,"events");d=e[a.type];if(e&&d){d=d.slice(0);e=0;for(var j=d.length;e<j;e++){var i=d[e];if(b||f.test(i.namespace)){a.handler=i.handler;a.data=i.data;a.handleObj=i;i=i.handler.apply(this,arguments);if(i!==w){a.result=i;if(i===false){a.preventDefault();a.stopPropagation()}}if(a.isImmediatePropagationStopped())break}}}return a.result},props:"altKey attrChange attrName bubbles button cancelable charCode clientX clientY ctrlKey currentTarget data detail eventPhase fromElement handler keyCode layerX layerY metaKey newValue offsetX offsetY originalTarget pageX pageY prevValue relatedNode relatedTarget screenX screenY shiftKey srcElement target toElement view wheelDelta which".split(" "),
+fix:function(a){if(a[G])return a;var b=a;a=c.Event(b);for(var d=this.props.length,f;d;){f=this.props[--d];a[f]=b[f]}if(!a.target)a.target=a.srcElement||s;if(a.target.nodeType===3)a.target=a.target.parentNode;if(!a.relatedTarget&&a.fromElement)a.relatedTarget=a.fromElement===a.target?a.toElement:a.fromElement;if(a.pageX==null&&a.clientX!=null){b=s.documentElement;d=s.body;a.pageX=a.clientX+(b&&b.scrollLeft||d&&d.scrollLeft||0)-(b&&b.clientLeft||d&&d.clientLeft||0);a.pageY=a.clientY+(b&&b.scrollTop||
+d&&d.scrollTop||0)-(b&&b.clientTop||d&&d.clientTop||0)}if(!a.which&&(a.charCode||a.charCode===0?a.charCode:a.keyCode))a.which=a.charCode||a.keyCode;if(!a.metaKey&&a.ctrlKey)a.metaKey=a.ctrlKey;if(!a.which&&a.button!==w)a.which=a.button&1?1:a.button&2?3:a.button&4?2:0;return a},guid:1E8,proxy:c.proxy,special:{ready:{setup:c.bindReady,teardown:c.noop},live:{add:function(a){c.event.add(this,a.origType,c.extend({},a,{handler:oa}))},remove:function(a){var b=true,d=a.origType.replace(O,"");c.each(c.data(this,
+"events").live||[],function(){if(d===this.origType.replace(O,""))return b=false});b&&c.event.remove(this,a.origType,oa)}},beforeunload:{setup:function(a,b,d){if(this.setInterval)this.onbeforeunload=d;return false},teardown:function(a,b){if(this.onbeforeunload===b)this.onbeforeunload=null}}}};var Ca=s.removeEventListener?function(a,b,d){a.removeEventListener(b,d,false)}:function(a,b,d){a.detachEvent("on"+b,d)};c.Event=function(a){if(!this.preventDefault)return new c.Event(a);if(a&&a.type){this.originalEvent=
+a;this.type=a.type}else this.type=a;this.timeStamp=J();this[G]=true};c.Event.prototype={preventDefault:function(){this.isDefaultPrevented=Z;var a=this.originalEvent;if(a){a.preventDefault&&a.preventDefault();a.returnValue=false}},stopPropagation:function(){this.isPropagationStopped=Z;var a=this.originalEvent;if(a){a.stopPropagation&&a.stopPropagation();a.cancelBubble=true}},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=Z;this.stopPropagation()},isDefaultPrevented:Y,isPropagationStopped:Y,
+isImmediatePropagationStopped:Y};var Da=function(a){var b=a.relatedTarget;try{for(;b&&b!==this;)b=b.parentNode;if(b!==this){a.type=a.data;c.event.handle.apply(this,arguments)}}catch(d){}},Ea=function(a){a.type=a.data;c.event.handle.apply(this,arguments)};c.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){c.event.special[a]={setup:function(d){c.event.add(this,b,d&&d.selector?Ea:Da,a)},teardown:function(d){c.event.remove(this,b,d&&d.selector?Ea:Da)}}});if(!c.support.submitBubbles)c.event.special.submit=
+{setup:function(){if(this.nodeName.toLowerCase()!=="form"){c.event.add(this,"click.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="submit"||d==="image")&&c(b).closest("form").length)return na("submit",this,arguments)});c.event.add(this,"keypress.specialSubmit",function(a){var b=a.target,d=b.type;if((d==="text"||d==="password")&&c(b).closest("form").length&&a.keyCode===13)return na("submit",this,arguments)})}else return false},teardown:function(){c.event.remove(this,".specialSubmit")}};
+if(!c.support.changeBubbles){var da=/textarea|input|select/i,ea,Fa=function(a){var b=a.type,d=a.value;if(b==="radio"||b==="checkbox")d=a.checked;else if(b==="select-multiple")d=a.selectedIndex>-1?c.map(a.options,function(f){return f.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},fa=function(a,b){var d=a.target,f,e;if(!(!da.test(d.nodeName)||d.readOnly)){f=c.data(d,"_change_data");e=Fa(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data",
+e);if(!(f===w||e===f))if(f!=null||e){a.type="change";return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:fa,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return fa.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return fa.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a,
+"_change_data",Fa(a))}},setup:function(){if(this.type==="file")return false;for(var a in ea)c.event.add(this,a+".specialChange",ea[a]);return da.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return da.test(this.nodeName)}};ea=c.event.special.change.filters}s.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(f){f=c.event.fix(f);f.type=b;return c.event.handle.call(this,f)}c.event.special[b]={setup:function(){this.addEventListener(a,
+d,true)},teardown:function(){this.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,f,e){if(typeof d==="object"){for(var j in d)this[b](j,f,d[j],e);return this}if(c.isFunction(f)){e=f;f=w}var i=b==="one"?c.proxy(e,function(k){c(this).unbind(k,i);return e.apply(this,arguments)}):e;if(d==="unload"&&b!=="one")this.one(d,f,e);else{j=0;for(var o=this.length;j<o;j++)c.event.add(this[j],d,i,f)}return this}});c.fn.extend({unbind:function(a,b){if(typeof a==="object"&&
+!a.preventDefault)for(var d in a)this.unbind(d,a[d]);else{d=0;for(var f=this.length;d<f;d++)c.event.remove(this[d],a,b)}return this},delegate:function(a,b,d,f){return this.live(b,d,f,a)},undelegate:function(a,b,d){return arguments.length===0?this.unbind("live"):this.die(b,null,d,a)},trigger:function(a,b){return this.each(function(){c.event.trigger(a,b,this)})},triggerHandler:function(a,b){if(this[0]){a=c.Event(a);a.preventDefault();a.stopPropagation();c.event.trigger(a,b,this[0]);return a.result}},
+toggle:function(a){for(var b=arguments,d=1;d<b.length;)c.proxy(a,b[d++]);return this.click(c.proxy(a,function(f){var e=(c.data(this,"lastToggle"+a.guid)||0)%d;c.data(this,"lastToggle"+a.guid,e+1);f.preventDefault();return b[e].apply(this,arguments)||false}))},hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}});var Ga={focus:"focusin",blur:"focusout",mouseenter:"mouseover",mouseleave:"mouseout"};c.each(["live","die"],function(a,b){c.fn[b]=function(d,f,e,j){var i,o=0,k,n,r=j||this.selector,
+u=j?this:c(this.context);if(c.isFunction(f)){e=f;f=w}for(d=(d||"").split(" ");(i=d[o++])!=null;){j=O.exec(i);k="";if(j){k=j[0];i=i.replace(O,"")}if(i==="hover")d.push("mouseenter"+k,"mouseleave"+k);else{n=i;if(i==="focus"||i==="blur"){d.push(Ga[i]+k);i+=k}else i=(Ga[i]||i)+k;b==="live"?u.each(function(){c.event.add(this,pa(i,r),{data:f,selector:r,handler:e,origType:i,origHandler:e,preType:n})}):u.unbind(pa(i,r),e)}}return this}});c.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error".split(" "),
+function(a,b){c.fn[b]=function(d){return d?this.bind(b,d):this.trigger(b)};if(c.attrFn)c.attrFn[b]=true});A.attachEvent&&!A.addEventListener&&A.attachEvent("onunload",function(){for(var a in c.cache)if(c.cache[a].handle)try{c.event.remove(c.cache[a].handle.elem)}catch(b){}});(function(){function a(g){for(var h="",l,m=0;g[m];m++){l=g[m];if(l.nodeType===3||l.nodeType===4)h+=l.nodeValue;else if(l.nodeType!==8)h+=a(l.childNodes)}return h}function b(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];
+if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1&&!p){t.sizcache=l;t.sizset=q}if(t.nodeName.toLowerCase()===h){y=t;break}t=t[g]}m[q]=y}}}function d(g,h,l,m,q,p){q=0;for(var v=m.length;q<v;q++){var t=m[q];if(t){t=t[g];for(var y=false;t;){if(t.sizcache===l){y=m[t.sizset];break}if(t.nodeType===1){if(!p){t.sizcache=l;t.sizset=q}if(typeof h!=="string"){if(t===h){y=true;break}}else if(k.filter(h,[t]).length>0){y=t;break}}t=t[g]}m[q]=y}}}var f=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,
+e=0,j=Object.prototype.toString,i=false,o=true;[0,0].sort(function(){o=false;return 0});var k=function(g,h,l,m){l=l||[];var q=h=h||s;if(h.nodeType!==1&&h.nodeType!==9)return[];if(!g||typeof g!=="string")return l;for(var p=[],v,t,y,S,H=true,M=x(h),I=g;(f.exec(""),v=f.exec(I))!==null;){I=v[3];p.push(v[1]);if(v[2]){S=v[3];break}}if(p.length>1&&r.exec(g))if(p.length===2&&n.relative[p[0]])t=ga(p[0]+p[1],h);else for(t=n.relative[p[0]]?[h]:k(p.shift(),h);p.length;){g=p.shift();if(n.relative[g])g+=p.shift();
+t=ga(g,t)}else{if(!m&&p.length>1&&h.nodeType===9&&!M&&n.match.ID.test(p[0])&&!n.match.ID.test(p[p.length-1])){v=k.find(p.shift(),h,M);h=v.expr?k.filter(v.expr,v.set)[0]:v.set[0]}if(h){v=m?{expr:p.pop(),set:z(m)}:k.find(p.pop(),p.length===1&&(p[0]==="~"||p[0]==="+")&&h.parentNode?h.parentNode:h,M);t=v.expr?k.filter(v.expr,v.set):v.set;if(p.length>0)y=z(t);else H=false;for(;p.length;){var D=p.pop();v=D;if(n.relative[D])v=p.pop();else D="";if(v==null)v=h;n.relative[D](y,v,M)}}else y=[]}y||(y=t);y||k.error(D||
+g);if(j.call(y)==="[object Array]")if(H)if(h&&h.nodeType===1)for(g=0;y[g]!=null;g++){if(y[g]&&(y[g]===true||y[g].nodeType===1&&E(h,y[g])))l.push(t[g])}else for(g=0;y[g]!=null;g++)y[g]&&y[g].nodeType===1&&l.push(t[g]);else l.push.apply(l,y);else z(y,l);if(S){k(S,q,l,m);k.uniqueSort(l)}return l};k.uniqueSort=function(g){if(B){i=o;g.sort(B);if(i)for(var h=1;h<g.length;h++)g[h]===g[h-1]&&g.splice(h--,1)}return g};k.matches=function(g,h){return k(g,null,null,h)};k.find=function(g,h,l){var m,q;if(!g)return[];
+for(var p=0,v=n.order.length;p<v;p++){var t=n.order[p];if(q=n.leftMatch[t].exec(g)){var y=q[1];q.splice(1,1);if(y.substr(y.length-1)!=="\\"){q[1]=(q[1]||"").replace(/\\/g,"");m=n.find[t](q,h,l);if(m!=null){g=g.replace(n.match[t],"");break}}}}m||(m=h.getElementsByTagName("*"));return{set:m,expr:g}};k.filter=function(g,h,l,m){for(var q=g,p=[],v=h,t,y,S=h&&h[0]&&x(h[0]);g&&h.length;){for(var H in n.filter)if((t=n.leftMatch[H].exec(g))!=null&&t[2]){var M=n.filter[H],I,D;D=t[1];y=false;t.splice(1,1);if(D.substr(D.length-
+1)!=="\\"){if(v===p)p=[];if(n.preFilter[H])if(t=n.preFilter[H](t,v,l,p,m,S)){if(t===true)continue}else y=I=true;if(t)for(var U=0;(D=v[U])!=null;U++)if(D){I=M(D,t,U,v);var Ha=m^!!I;if(l&&I!=null)if(Ha)y=true;else v[U]=false;else if(Ha){p.push(D);y=true}}if(I!==w){l||(v=p);g=g.replace(n.match[H],"");if(!y)return[];break}}}if(g===q)if(y==null)k.error(g);else break;q=g}return v};k.error=function(g){throw"Syntax error, unrecognized expression: "+g;};var n=k.selectors={order:["ID","NAME","TAG"],match:{ID:/#((?:[\w\u00c0-\uFFFF-]|\\.)+)/,
+CLASS:/\.((?:[\w\u00c0-\uFFFF-]|\\.)+)/,NAME:/\[name=['"]*((?:[\w\u00c0-\uFFFF-]|\\.)+)['"]*\]/,ATTR:/\[\s*((?:[\w\u00c0-\uFFFF-]|\\.)+)\s*(?:(\S?=)\s*(['"]*)(.*?)\3|)\s*\]/,TAG:/^((?:[\w\u00c0-\uFFFF\*-]|\\.)+)/,CHILD:/:(only|nth|last|first)-child(?:\((even|odd|[\dn+-]*)\))?/,POS:/:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^-]|$)/,PSEUDO:/:((?:[\w\u00c0-\uFFFF-]|\\.)+)(?:\((['"]?)((?:\([^\)]+\)|[^\(\)]*)+)\2\))?/},leftMatch:{},attrMap:{"class":"className","for":"htmlFor"},attrHandle:{href:function(g){return g.getAttribute("href")}},
+relative:{"+":function(g,h){var l=typeof h==="string",m=l&&!/\W/.test(h);l=l&&!m;if(m)h=h.toLowerCase();m=0;for(var q=g.length,p;m<q;m++)if(p=g[m]){for(;(p=p.previousSibling)&&p.nodeType!==1;);g[m]=l||p&&p.nodeName.toLowerCase()===h?p||false:p===h}l&&k.filter(h,g,true)},">":function(g,h){var l=typeof h==="string";if(l&&!/\W/.test(h)){h=h.toLowerCase();for(var m=0,q=g.length;m<q;m++){var p=g[m];if(p){l=p.parentNode;g[m]=l.nodeName.toLowerCase()===h?l:false}}}else{m=0;for(q=g.length;m<q;m++)if(p=g[m])g[m]=
+l?p.parentNode:p.parentNode===h;l&&k.filter(h,g,true)}},"":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("parentNode",h,m,g,p,l)},"~":function(g,h,l){var m=e++,q=d;if(typeof h==="string"&&!/\W/.test(h)){var p=h=h.toLowerCase();q=b}q("previousSibling",h,m,g,p,l)}},find:{ID:function(g,h,l){if(typeof h.getElementById!=="undefined"&&!l)return(g=h.getElementById(g[1]))?[g]:[]},NAME:function(g,h){if(typeof h.getElementsByName!=="undefined"){var l=[];
+h=h.getElementsByName(g[1]);for(var m=0,q=h.length;m<q;m++)h[m].getAttribute("name")===g[1]&&l.push(h[m]);return l.length===0?null:l}},TAG:function(g,h){return h.getElementsByTagName(g[1])}},preFilter:{CLASS:function(g,h,l,m,q,p){g=" "+g[1].replace(/\\/g,"")+" ";if(p)return g;p=0;for(var v;(v=h[p])!=null;p++)if(v)if(q^(v.className&&(" "+v.className+" ").replace(/[\t\n]/g," ").indexOf(g)>=0))l||m.push(v);else if(l)h[p]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()},
+CHILD:function(g){if(g[1]==="nth"){var h=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=h[1]+(h[2]||1)-0;g[3]=h[3]-0}g[0]=e++;return g},ATTR:function(g,h,l,m,q,p){h=g[1].replace(/\\/g,"");if(!p&&n.attrMap[h])g[1]=n.attrMap[h];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,h,l,m,q){if(g[1]==="not")if((f.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=k(g[3],null,null,h);else{g=k.filter(g[3],h,l,true^q);l||m.push.apply(m,
+g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled===true},checked:function(g){return g.checked===true},selected:function(g){return g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,h,l){return!!k(l[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)},
+text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"===g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}},
+setFilters:{first:function(g,h){return h===0},last:function(g,h,l,m){return h===m.length-1},even:function(g,h){return h%2===0},odd:function(g,h){return h%2===1},lt:function(g,h,l){return h<l[3]-0},gt:function(g,h,l){return h>l[3]-0},nth:function(g,h,l){return l[3]-0===h},eq:function(g,h,l){return l[3]-0===h}},filter:{PSEUDO:function(g,h,l,m){var q=h[1],p=n.filters[q];if(p)return p(g,l,h,m);else if(q==="contains")return(g.textContent||g.innerText||a([g])||"").indexOf(h[3])>=0;else if(q==="not"){h=
+h[3];l=0;for(m=h.length;l<m;l++)if(h[l]===g)return false;return true}else k.error("Syntax error, unrecognized expression: "+q)},CHILD:function(g,h){var l=h[1],m=g;switch(l){case "only":case "first":for(;m=m.previousSibling;)if(m.nodeType===1)return false;if(l==="first")return true;m=g;case "last":for(;m=m.nextSibling;)if(m.nodeType===1)return false;return true;case "nth":l=h[2];var q=h[3];if(l===1&&q===0)return true;h=h[0];var p=g.parentNode;if(p&&(p.sizcache!==h||!g.nodeIndex)){var v=0;for(m=p.firstChild;m;m=
+m.nextSibling)if(m.nodeType===1)m.nodeIndex=++v;p.sizcache=h}g=g.nodeIndex-q;return l===0?g===0:g%l===0&&g/l>=0}},ID:function(g,h){return g.nodeType===1&&g.getAttribute("id")===h},TAG:function(g,h){return h==="*"&&g.nodeType===1||g.nodeName.toLowerCase()===h},CLASS:function(g,h){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(h)>-1},ATTR:function(g,h){var l=h[1];g=n.attrHandle[l]?n.attrHandle[l](g):g[l]!=null?g[l]:g.getAttribute(l);l=g+"";var m=h[2];h=h[4];return g==null?m==="!=":m===
+"="?l===h:m==="*="?l.indexOf(h)>=0:m==="~="?(" "+l+" ").indexOf(h)>=0:!h?l&&g!==false:m==="!="?l!==h:m==="^="?l.indexOf(h)===0:m==="$="?l.substr(l.length-h.length)===h:m==="|="?l===h||l.substr(0,h.length+1)===h+"-":false},POS:function(g,h,l,m){var q=n.setFilters[h[2]];if(q)return q(g,l,h,m)}}},r=n.match.POS;for(var u in n.match){n.match[u]=new RegExp(n.match[u].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[u]=new RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[u].source.replace(/\\(\d+)/g,function(g,
+h){return"\\"+(h-0+1)}))}var z=function(g,h){g=Array.prototype.slice.call(g,0);if(h){h.push.apply(h,g);return h}return g};try{Array.prototype.slice.call(s.documentElement.childNodes,0)}catch(C){z=function(g,h){h=h||[];if(j.call(g)==="[object Array]")Array.prototype.push.apply(h,g);else if(typeof g.length==="number")for(var l=0,m=g.length;l<m;l++)h.push(g[l]);else for(l=0;g[l];l++)h.push(g[l]);return h}}var B;if(s.documentElement.compareDocumentPosition)B=function(g,h){if(!g.compareDocumentPosition||
+!h.compareDocumentPosition){if(g==h)i=true;return g.compareDocumentPosition?-1:1}g=g.compareDocumentPosition(h)&4?-1:g===h?0:1;if(g===0)i=true;return g};else if("sourceIndex"in s.documentElement)B=function(g,h){if(!g.sourceIndex||!h.sourceIndex){if(g==h)i=true;return g.sourceIndex?-1:1}g=g.sourceIndex-h.sourceIndex;if(g===0)i=true;return g};else if(s.createRange)B=function(g,h){if(!g.ownerDocument||!h.ownerDocument){if(g==h)i=true;return g.ownerDocument?-1:1}var l=g.ownerDocument.createRange(),m=
+h.ownerDocument.createRange();l.setStart(g,0);l.setEnd(g,0);m.setStart(h,0);m.setEnd(h,0);g=l.compareBoundaryPoints(Range.START_TO_END,m);if(g===0)i=true;return g};(function(){var g=s.createElement("div"),h="script"+(new Date).getTime();g.innerHTML="<a name='"+h+"'/>";var l=s.documentElement;l.insertBefore(g,l.firstChild);if(s.getElementById(h)){n.find.ID=function(m,q,p){if(typeof q.getElementById!=="undefined"&&!p)return(q=q.getElementById(m[1]))?q.id===m[1]||typeof q.getAttributeNode!=="undefined"&&
+q.getAttributeNode("id").nodeValue===m[1]?[q]:w:[]};n.filter.ID=function(m,q){var p=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&p&&p.nodeValue===q}}l.removeChild(g);l=g=null})();(function(){var g=s.createElement("div");g.appendChild(s.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(h,l){l=l.getElementsByTagName(h[1]);if(h[1]==="*"){h=[];for(var m=0;l[m];m++)l[m].nodeType===1&&h.push(l[m]);l=h}return l};g.innerHTML="<a href='#'></a>";
+if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(h){return h.getAttribute("href",2)};g=null})();s.querySelectorAll&&function(){var g=k,h=s.createElement("div");h.innerHTML="<p class='TEST'></p>";if(!(h.querySelectorAll&&h.querySelectorAll(".TEST").length===0)){k=function(m,q,p,v){q=q||s;if(!v&&q.nodeType===9&&!x(q))try{return z(q.querySelectorAll(m),p)}catch(t){}return g(m,q,p,v)};for(var l in g)k[l]=g[l];h=null}}();
+(function(){var g=s.createElement("div");g.innerHTML="<div class='test e'></div><div class='test'></div>";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length===0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(h,l,m){if(typeof l.getElementsByClassName!=="undefined"&&!m)return l.getElementsByClassName(h[1])};g=null}}})();var E=s.compareDocumentPosition?function(g,h){return!!(g.compareDocumentPosition(h)&16)}:
+function(g,h){return g!==h&&(g.contains?g.contains(h):true)},x=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false},ga=function(g,h){var l=[],m="",q;for(h=h.nodeType?[h]:h;q=n.match.PSEUDO.exec(g);){m+=q[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;q=0;for(var p=h.length;q<p;q++)k(g,h[q],l);return k.filter(m,l)};c.find=k;c.expr=k.selectors;c.expr[":"]=c.expr.filters;c.unique=k.uniqueSort;c.text=a;c.isXMLDoc=x;c.contains=E})();var eb=/Until$/,fb=/^(?:parents|prevUntil|prevAll)/,
+gb=/,/;R=Array.prototype.slice;var Ia=function(a,b,d){if(c.isFunction(b))return c.grep(a,function(e,j){return!!b.call(e,j,e)===d});else if(b.nodeType)return c.grep(a,function(e){return e===b===d});else if(typeof b==="string"){var f=c.grep(a,function(e){return e.nodeType===1});if(Ua.test(b))return c.filter(b,f,!d);else b=c.filter(b,f)}return c.grep(a,function(e){return c.inArray(e,b)>=0===d})};c.fn.extend({find:function(a){for(var b=this.pushStack("","find",a),d=0,f=0,e=this.length;f<e;f++){d=b.length;
+c.find(a,this[f],b);if(f>0)for(var j=d;j<b.length;j++)for(var i=0;i<d;i++)if(b[i]===b[j]){b.splice(j--,1);break}}return b},has:function(a){var b=c(a);return this.filter(function(){for(var d=0,f=b.length;d<f;d++)if(c.contains(this,b[d]))return true})},not:function(a){return this.pushStack(Ia(this,a,false),"not",a)},filter:function(a){return this.pushStack(Ia(this,a,true),"filter",a)},is:function(a){return!!a&&c.filter(a,this).length>0},closest:function(a,b){if(c.isArray(a)){var d=[],f=this[0],e,j=
+{},i;if(f&&a.length){e=0;for(var o=a.length;e<o;e++){i=a[e];j[i]||(j[i]=c.expr.match.POS.test(i)?c(i,b||this.context):i)}for(;f&&f.ownerDocument&&f!==b;){for(i in j){e=j[i];if(e.jquery?e.index(f)>-1:c(f).is(e)){d.push({selector:i,elem:f});delete j[i]}}f=f.parentNode}}return d}var k=c.expr.match.POS.test(a)?c(a,b||this.context):null;return this.map(function(n,r){for(;r&&r.ownerDocument&&r!==b;){if(k?k.index(r)>-1:c(r).is(a))return r;r=r.parentNode}return null})},index:function(a){if(!a||typeof a===
+"string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){a=typeof a==="string"?c(a,b||this.context):c.makeArray(a);b=c.merge(this.get(),a);return this.pushStack(qa(a[0])||qa(b[0])?b:c.unique(b))},andSelf:function(){return this.add(this.prevObject)}});c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode",
+d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling",d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")?
+a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,f){var e=c.map(this,b,d);eb.test(a)||(f=d);if(f&&typeof f==="string")e=c.filter(f,e);e=this.length>1?c.unique(e):e;if((this.length>1||gb.test(f))&&fb.test(a))e=e.reverse();return this.pushStack(e,a,R.call(arguments).join(","))}});c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return c.find.matches(a,b)},dir:function(a,b,d){var f=[];for(a=a[b];a&&a.nodeType!==9&&(d===w||a.nodeType!==1||!c(a).is(d));){a.nodeType===
+1&&f.push(a);a=a[b]}return f},nth:function(a,b,d){b=b||1;for(var f=0;a;a=a[d])if(a.nodeType===1&&++f===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var Ja=/ jQuery\d+="(?:\d+|null)"/g,V=/^\s+/,Ka=/(<([\w:]+)[^>]*?)\/>/g,hb=/^(?:area|br|col|embed|hr|img|input|link|meta|param)$/i,La=/<([\w:]+)/,ib=/<tbody/i,jb=/<|&#?\w+;/,ta=/<script|<object|<embed|<option|<style/i,ua=/checked\s*(?:[^=]|=\s*.checked.)/i,Ma=function(a,b,d){return hb.test(d)?
+a:b+"></"+d+">"},F={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],area:[1,"<map>","</map>"],_default:[0,"",""]};F.optgroup=F.option;F.tbody=F.tfoot=F.colgroup=F.caption=F.thead;F.th=F.td;if(!c.support.htmlSerialize)F._default=[1,"div<div>","</div>"];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d=
+c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==w)return this.empty().append((this[0]&&this[0].ownerDocument||s).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this,d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this},
+wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})},unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})},
+prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a=c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,
+this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,f;(f=this[d])!=null;d++)if(!a||c.filter(a,[f]).length){if(!b&&f.nodeType===1){c.cleanData(f.getElementsByTagName("*"));c.cleanData([f])}f.parentNode&&f.parentNode.removeChild(f)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild);
+return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,f=this.ownerDocument;if(!d){d=f.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(Ja,"").replace(/=([^="'>\s]+\/)>/g,'="$1">').replace(V,"")],f)[0]}else return this.cloneNode(true)});if(a===true){ra(this,b);ra(this.find("*"),b.find("*"))}return b},html:function(a){if(a===w)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(Ja,
+""):null;else if(typeof a==="string"&&!ta.test(a)&&(c.support.leadingWhitespace||!V.test(a))&&!F[(La.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Ka,Ma);try{for(var b=0,d=this.length;b<d;b++)if(this[b].nodeType===1){c.cleanData(this[b].getElementsByTagName("*"));this[b].innerHTML=a}}catch(f){this.empty().append(a)}}else c.isFunction(a)?this.each(function(e){var j=c(this),i=j.html();j.empty().append(function(){return a.call(this,e,i)})}):this.empty().append(a);return this},replaceWith:function(a){if(this[0]&&
+this[0].parentNode){if(c.isFunction(a))return this.each(function(b){var d=c(this),f=d.html();d.replaceWith(a.call(this,b,f))});if(typeof a!=="string")a=c(a).detach();return this.each(function(){var b=this.nextSibling,d=this.parentNode;c(this).remove();b?c(b).before(a):c(d).append(a)})}else return this.pushStack(c(c.isFunction(a)?a():a),"replaceWith",a)},detach:function(a){return this.remove(a,true)},domManip:function(a,b,d){function f(u){return c.nodeName(u,"table")?u.getElementsByTagName("tbody")[0]||
+u.appendChild(u.ownerDocument.createElement("tbody")):u}var e,j,i=a[0],o=[],k;if(!c.support.checkClone&&arguments.length===3&&typeof i==="string"&&ua.test(i))return this.each(function(){c(this).domManip(a,b,d,true)});if(c.isFunction(i))return this.each(function(u){var z=c(this);a[0]=i.call(this,u,b?z.html():w);z.domManip(a,b,d)});if(this[0]){e=i&&i.parentNode;e=c.support.parentNode&&e&&e.nodeType===11&&e.childNodes.length===this.length?{fragment:e}:sa(a,this,o);k=e.fragment;if(j=k.childNodes.length===
+1?(k=k.firstChild):k.firstChild){b=b&&c.nodeName(j,"tr");for(var n=0,r=this.length;n<r;n++)d.call(b?f(this[n],j):this[n],n>0||e.cacheable||this.length>1?k.cloneNode(true):k)}o.length&&c.each(o,Qa)}return this}});c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var f=[];d=c(d);var e=this.length===1&&this[0].parentNode;if(e&&e.nodeType===11&&e.childNodes.length===1&&d.length===1){d[b](this[0]);
+return this}else{e=0;for(var j=d.length;e<j;e++){var i=(e>0?this.clone(true):this).get();c.fn[b].apply(c(d[e]),i);f=f.concat(i)}return this.pushStack(f,a,d.selector)}}});c.extend({clean:function(a,b,d,f){b=b||s;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||s;for(var e=[],j=0,i;(i=a[j])!=null;j++){if(typeof i==="number")i+="";if(i){if(typeof i==="string"&&!jb.test(i))i=b.createTextNode(i);else if(typeof i==="string"){i=i.replace(Ka,Ma);var o=(La.exec(i)||["",
+""])[1].toLowerCase(),k=F[o]||F._default,n=k[0],r=b.createElement("div");for(r.innerHTML=k[1]+i+k[2];n--;)r=r.lastChild;if(!c.support.tbody){n=ib.test(i);o=o==="table"&&!n?r.firstChild&&r.firstChild.childNodes:k[1]==="<table>"&&!n?r.childNodes:[];for(k=o.length-1;k>=0;--k)c.nodeName(o[k],"tbody")&&!o[k].childNodes.length&&o[k].parentNode.removeChild(o[k])}!c.support.leadingWhitespace&&V.test(i)&&r.insertBefore(b.createTextNode(V.exec(i)[0]),r.firstChild);i=r.childNodes}if(i.nodeType)e.push(i);else e=
+c.merge(e,i)}}if(d)for(j=0;e[j];j++)if(f&&c.nodeName(e[j],"script")&&(!e[j].type||e[j].type.toLowerCase()==="text/javascript"))f.push(e[j].parentNode?e[j].parentNode.removeChild(e[j]):e[j]);else{e[j].nodeType===1&&e.splice.apply(e,[j+1,0].concat(c.makeArray(e[j].getElementsByTagName("script"))));d.appendChild(e[j])}return e},cleanData:function(a){for(var b,d,f=c.cache,e=c.event.special,j=c.support.deleteExpando,i=0,o;(o=a[i])!=null;i++)if(d=o[c.expando]){b=f[d];if(b.events)for(var k in b.events)e[k]?
+c.event.remove(o,k):Ca(o,k,b.handle);if(j)delete o[c.expando];else o.removeAttribute&&o.removeAttribute(c.expando);delete f[d]}}});var kb=/z-?index|font-?weight|opacity|zoom|line-?height/i,Na=/alpha\([^)]*\)/,Oa=/opacity=([^)]*)/,ha=/float/i,ia=/-([a-z])/ig,lb=/([A-Z])/g,mb=/^-?\d+(?:px)?$/i,nb=/^-?\d/,ob={position:"absolute",visibility:"hidden",display:"block"},pb=["Left","Right"],qb=["Top","Bottom"],rb=s.defaultView&&s.defaultView.getComputedStyle,Pa=c.support.cssFloat?"cssFloat":"styleFloat",ja=
+function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){return X(this,a,b,true,function(d,f,e){if(e===w)return c.curCSS(d,f);if(typeof e==="number"&&!kb.test(f))e+="px";c.style(d,f,e)})};c.extend({style:function(a,b,d){if(!a||a.nodeType===3||a.nodeType===8)return w;if((b==="width"||b==="height")&&parseFloat(d)<0)d=w;var f=a.style||a,e=d!==w;if(!c.support.opacity&&b==="opacity"){if(e){f.zoom=1;b=parseInt(d,10)+""==="NaN"?"":"alpha(opacity="+d*100+")";a=f.filter||c.curCSS(a,"filter")||"";f.filter=
+Na.test(a)?a.replace(Na,b):b}return f.filter&&f.filter.indexOf("opacity=")>=0?parseFloat(Oa.exec(f.filter)[1])/100+"":""}if(ha.test(b))b=Pa;b=b.replace(ia,ja);if(e)f[b]=d;return f[b]},css:function(a,b,d,f){if(b==="width"||b==="height"){var e,j=b==="width"?pb:qb;function i(){e=b==="width"?a.offsetWidth:a.offsetHeight;f!=="border"&&c.each(j,function(){f||(e-=parseFloat(c.curCSS(a,"padding"+this,true))||0);if(f==="margin")e+=parseFloat(c.curCSS(a,"margin"+this,true))||0;else e-=parseFloat(c.curCSS(a,
+"border"+this+"Width",true))||0})}a.offsetWidth!==0?i():c.swap(a,ob,i);return Math.max(0,Math.round(e))}return c.curCSS(a,b,d)},curCSS:function(a,b,d){var f,e=a.style;if(!c.support.opacity&&b==="opacity"&&a.currentStyle){f=Oa.test(a.currentStyle.filter||"")?parseFloat(RegExp.$1)/100+"":"";return f===""?"1":f}if(ha.test(b))b=Pa;if(!d&&e&&e[b])f=e[b];else if(rb){if(ha.test(b))b="float";b=b.replace(lb,"-$1").toLowerCase();e=a.ownerDocument.defaultView;if(!e)return null;if(a=e.getComputedStyle(a,null))f=
+a.getPropertyValue(b);if(b==="opacity"&&f==="")f="1"}else if(a.currentStyle){d=b.replace(ia,ja);f=a.currentStyle[b]||a.currentStyle[d];if(!mb.test(f)&&nb.test(f)){b=e.left;var j=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;e.left=d==="fontSize"?"1em":f||0;f=e.pixelLeft+"px";e.left=b;a.runtimeStyle.left=j}}return f},swap:function(a,b,d){var f={};for(var e in b){f[e]=a.style[e];a.style[e]=b[e]}d.call(a);for(e in b)a.style[e]=f[e]}});if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b=
+a.offsetWidth,d=a.offsetHeight,f=a.nodeName.toLowerCase()==="tr";return b===0&&d===0&&!f?true:b>0&&d>0&&!f?false:c.curCSS(a,"display")==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var sb=J(),tb=/<script(.|\s)*?\/script>/gi,ub=/select|textarea/i,vb=/color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week/i,N=/=\?(&|$)/,ka=/\?/,wb=/(\?|&)_=.*?(&|$)/,xb=/^(\w+:)?\/\/([^\/?#]+)/,yb=/%20/g,zb=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!==
+"string")return zb.call(this,a);else if(!this.length)return this;var f=a.indexOf(" ");if(f>=0){var e=a.slice(f,a.length);a=a.slice(0,f)}f="GET";if(b)if(c.isFunction(b)){d=b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);f="POST"}var j=this;c.ajax({url:a,type:f,dataType:"html",data:b,complete:function(i,o){if(o==="success"||o==="notmodified")j.html(e?c("<div />").append(i.responseText.replace(tb,"")).find(e):i.responseText);d&&j.each(d,[i.responseText,o,i])}});return this},
+serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ub.test(this.nodeName)||vb.test(this.type))}).map(function(a,b){a=c(this).val();return a==null?null:c.isArray(a)?c.map(a,function(d){return{name:b.name,value:d}}):{name:b.name,value:a}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),
+function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:f})},getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:f})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href,
+global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:A.XMLHttpRequest&&(A.location.protocol!=="file:"||!A.ActiveXObject)?function(){return new A.XMLHttpRequest}:function(){try{return new A.ActiveXObject("Microsoft.XMLHTTP")}catch(a){}},accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},etag:{},ajax:function(a){function b(){e.success&&
+e.success.call(k,o,i,x);e.global&&f("ajaxSuccess",[x,e])}function d(){e.complete&&e.complete.call(k,x,i);e.global&&f("ajaxComplete",[x,e]);e.global&&!--c.active&&c.event.trigger("ajaxStop")}function f(q,p){(e.context?c(e.context):c.event).trigger(q,p)}var e=c.extend(true,{},c.ajaxSettings,a),j,i,o,k=a&&a.context||e,n=e.type.toUpperCase();if(e.data&&e.processData&&typeof e.data!=="string")e.data=c.param(e.data,e.traditional);if(e.dataType==="jsonp"){if(n==="GET")N.test(e.url)||(e.url+=(ka.test(e.url)?
+"&":"?")+(e.jsonp||"callback")+"=?");else if(!e.data||!N.test(e.data))e.data=(e.data?e.data+"&":"")+(e.jsonp||"callback")+"=?";e.dataType="json"}if(e.dataType==="json"&&(e.data&&N.test(e.data)||N.test(e.url))){j=e.jsonpCallback||"jsonp"+sb++;if(e.data)e.data=(e.data+"").replace(N,"="+j+"$1");e.url=e.url.replace(N,"="+j+"$1");e.dataType="script";A[j]=A[j]||function(q){o=q;b();d();A[j]=w;try{delete A[j]}catch(p){}z&&z.removeChild(C)}}if(e.dataType==="script"&&e.cache===null)e.cache=false;if(e.cache===
+false&&n==="GET"){var r=J(),u=e.url.replace(wb,"$1_="+r+"$2");e.url=u+(u===e.url?(ka.test(e.url)?"&":"?")+"_="+r:"")}if(e.data&&n==="GET")e.url+=(ka.test(e.url)?"&":"?")+e.data;e.global&&!c.active++&&c.event.trigger("ajaxStart");r=(r=xb.exec(e.url))&&(r[1]&&r[1]!==location.protocol||r[2]!==location.host);if(e.dataType==="script"&&n==="GET"&&r){var z=s.getElementsByTagName("head")[0]||s.documentElement,C=s.createElement("script");C.src=e.url;if(e.scriptCharset)C.charset=e.scriptCharset;if(!j){var B=
+false;C.onload=C.onreadystatechange=function(){if(!B&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){B=true;b();d();C.onload=C.onreadystatechange=null;z&&C.parentNode&&z.removeChild(C)}}}z.insertBefore(C,z.firstChild);return w}var E=false,x=e.xhr();if(x){e.username?x.open(n,e.url,e.async,e.username,e.password):x.open(n,e.url,e.async);try{if(e.data||a&&a.contentType)x.setRequestHeader("Content-Type",e.contentType);if(e.ifModified){c.lastModified[e.url]&&x.setRequestHeader("If-Modified-Since",
+c.lastModified[e.url]);c.etag[e.url]&&x.setRequestHeader("If-None-Match",c.etag[e.url])}r||x.setRequestHeader("X-Requested-With","XMLHttpRequest");x.setRequestHeader("Accept",e.dataType&&e.accepts[e.dataType]?e.accepts[e.dataType]+", */*":e.accepts._default)}catch(ga){}if(e.beforeSend&&e.beforeSend.call(k,x,e)===false){e.global&&!--c.active&&c.event.trigger("ajaxStop");x.abort();return false}e.global&&f("ajaxSend",[x,e]);var g=x.onreadystatechange=function(q){if(!x||x.readyState===0||q==="abort"){E||
+d();E=true;if(x)x.onreadystatechange=c.noop}else if(!E&&x&&(x.readyState===4||q==="timeout")){E=true;x.onreadystatechange=c.noop;i=q==="timeout"?"timeout":!c.httpSuccess(x)?"error":e.ifModified&&c.httpNotModified(x,e.url)?"notmodified":"success";var p;if(i==="success")try{o=c.httpData(x,e.dataType,e)}catch(v){i="parsererror";p=v}if(i==="success"||i==="notmodified")j||b();else c.handleError(e,x,i,p);d();q==="timeout"&&x.abort();if(e.async)x=null}};try{var h=x.abort;x.abort=function(){x&&h.call(x);
+g("abort")}}catch(l){}e.async&&e.timeout>0&&setTimeout(function(){x&&!E&&g("timeout")},e.timeout);try{x.send(n==="POST"||n==="PUT"||n==="DELETE"?e.data:null)}catch(m){c.handleError(e,x,null,m);d()}e.async||g();return x}},handleError:function(a,b,d,f){if(a.error)a.error.call(a.context||a,b,d,f);if(a.global)(a.context?c(a.context):c.event).trigger("ajaxError",[b,a,f])},active:0,httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status===
+1223||a.status===0}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),f=a.getResponseHeader("Etag");if(d)c.lastModified[b]=d;if(f)c.etag[b]=f;return a.status===304||a.status===0},httpData:function(a,b,d){var f=a.getResponseHeader("content-type")||"",e=b==="xml"||!b&&f.indexOf("xml")>=0;a=e?a.responseXML:a.responseText;e&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof a==="string")if(b===
+"json"||!b&&f.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&f.indexOf("javascript")>=0)c.globalEval(a);return a},param:function(a,b){function d(i,o){if(c.isArray(o))c.each(o,function(k,n){b||/\[\]$/.test(i)?f(i,n):d(i+"["+(typeof n==="object"||c.isArray(n)?k:"")+"]",n)});else!b&&o!=null&&typeof o==="object"?c.each(o,function(k,n){d(i+"["+k+"]",n)}):f(i,o)}function f(i,o){o=c.isFunction(o)?o():o;e[e.length]=encodeURIComponent(i)+"="+encodeURIComponent(o)}var e=[];if(b===w)b=c.ajaxSettings.traditional;
+if(c.isArray(a)||a.jquery)c.each(a,function(){f(this.name,this.value)});else for(var j in a)d(j,a[j]);return e.join("&").replace(yb,"+")}});var la={},Ab=/toggle|show|hide/,Bb=/^([+-]=)?([\d+-.]+)(.*)$/,W,va=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b){if(a||a===0)return this.animate(K("show",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");
+this[a].style.display=d||"";if(c.css(this[a],"display")==="none"){d=this[a].nodeName;var f;if(la[d])f=la[d];else{var e=c("<"+d+" />").appendTo("body");f=e.css("display");if(f==="none")f="block";e.remove();la[d]=f}c.data(this[a],"olddisplay",f)}}a=0;for(b=this.length;a<b;a++)this[a].style.display=c.data(this[a],"olddisplay")||"";return this}},hide:function(a,b){if(a||a===0)return this.animate(K("hide",3),a,b);else{a=0;for(b=this.length;a<b;a++){var d=c.data(this[a],"olddisplay");!d&&d!=="none"&&c.data(this[a],
+"olddisplay",c.css(this[a],"display"))}a=0;for(b=this.length;a<b;a++)this[a].style.display="none";return this}},_toggle:c.fn.toggle,toggle:function(a,b){var d=typeof a==="boolean";if(c.isFunction(a)&&c.isFunction(b))this._toggle.apply(this,arguments);else a==null||d?this.each(function(){var f=d?a:c(this).is(":hidden");c(this)[f?"show":"hide"]()}):this.animate(K("toggle",3),a,b);return this},fadeTo:function(a,b,d){return this.filter(":hidden").css("opacity",0).show().end().animate({opacity:b},a,d)},
+animate:function(a,b,d,f){var e=c.speed(b,d,f);if(c.isEmptyObject(a))return this.each(e.complete);return this[e.queue===false?"each":"queue"](function(){var j=c.extend({},e),i,o=this.nodeType===1&&c(this).is(":hidden"),k=this;for(i in a){var n=i.replace(ia,ja);if(i!==n){a[n]=a[i];delete a[i];i=n}if(a[i]==="hide"&&o||a[i]==="show"&&!o)return j.complete.call(this);if((i==="height"||i==="width")&&this.style){j.display=c.css(this,"display");j.overflow=this.style.overflow}if(c.isArray(a[i])){(j.specialEasing=
+j.specialEasing||{})[i]=a[i][1];a[i]=a[i][0]}}if(j.overflow!=null)this.style.overflow="hidden";j.curAnim=c.extend({},a);c.each(a,function(r,u){var z=new c.fx(k,j,r);if(Ab.test(u))z[u==="toggle"?o?"show":"hide":u](a);else{var C=Bb.exec(u),B=z.cur(true)||0;if(C){u=parseFloat(C[2]);var E=C[3]||"px";if(E!=="px"){k.style[r]=(u||1)+E;B=(u||1)/z.cur(true)*B;k.style[r]=B+E}if(C[1])u=(C[1]==="-="?-1:1)*u+B;z.custom(B,u,E)}else z.custom(B,u,"")}});return true})},stop:function(a,b){var d=c.timers;a&&this.queue([]);
+this.each(function(){for(var f=d.length-1;f>=0;f--)if(d[f].elem===this){b&&d[f](true);d.splice(f,1)}});b||this.dequeue();return this}});c.each({slideDown:K("show",1),slideUp:K("hide",1),slideToggle:K("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,f){return this.animate(b,d,f)}});c.extend({speed:function(a,b,d){var f=a&&typeof a==="object"?a:{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};f.duration=c.fx.off?0:typeof f.duration===
+"number"?f.duration:c.fx.speeds[f.duration]||c.fx.speeds._default;f.old=f.complete;f.complete=function(){f.queue!==false&&c(this).dequeue();c.isFunction(f.old)&&f.old.call(this)};return f},easing:{linear:function(a,b,d,f){return d+f*a},swing:function(a,b,d,f){return(-Math.cos(a*Math.PI)/2+0.5)*f+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]||
+c.fx.step._default)(this);if((this.prop==="height"||this.prop==="width")&&this.elem.style)this.elem.style.display="block"},cur:function(a){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];return(a=parseFloat(c.css(this.elem,this.prop,a)))&&a>-10000?a:parseFloat(c.curCSS(this.elem,this.prop))||0},custom:function(a,b,d){function f(j){return e.step(j)}this.startTime=J();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start;
+this.pos=this.state=0;var e=this;f.elem=this.elem;if(f()&&c.timers.push(f)&&!W)W=setInterval(c.fx.tick,13)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(a){var b=J(),d=true;if(a||b>=this.options.duration+this.startTime){this.now=
+this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var f in this.options.curAnim)if(this.options.curAnim[f]!==true)d=false;if(d){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;a=c.data(this.elem,"olddisplay");this.elem.style.display=a?a:this.options.display;if(c.css(this.elem,"display")==="none")this.elem.style.display="block"}this.options.hide&&c(this.elem).hide();if(this.options.hide||this.options.show)for(var e in this.options.curAnim)c.style(this.elem,
+e,this.options.orig[e]);this.options.complete.call(this.elem)}return false}else{e=b-this.startTime;this.state=e/this.options.duration;a=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||a](this.state,e,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a=c.timers,b=0;b<a.length;b++)a[b]()||a.splice(b--,1);a.length||
+c.fx.stop()},stop:function(){clearInterval(W);W=null},speeds:{slow:600,fast:200,_default:400},step:{opacity:function(a){c.style(a.elem,"opacity",a.now)},_default:function(a){if(a.elem.style&&a.elem.style[a.prop]!=null)a.elem.style[a.prop]=(a.prop==="width"||a.prop==="height"?Math.max(0,a.now):a.now)+a.unit;else a.elem[a.prop]=a.now}}});if(c.expr&&c.expr.filters)c.expr.filters.animated=function(a){return c.grep(c.timers,function(b){return a===b.elem}).length};c.fn.offset="getBoundingClientRect"in s.documentElement?
+function(a){var b=this[0];if(a)return this.each(function(e){c.offset.setOffset(this,a,e)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);var d=b.getBoundingClientRect(),f=b.ownerDocument;b=f.body;f=f.documentElement;return{top:d.top+(self.pageYOffset||c.support.boxModel&&f.scrollTop||b.scrollTop)-(f.clientTop||b.clientTop||0),left:d.left+(self.pageXOffset||c.support.boxModel&&f.scrollLeft||b.scrollLeft)-(f.clientLeft||b.clientLeft||0)}}:function(a){var b=
+this[0];if(a)return this.each(function(r){c.offset.setOffset(this,a,r)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);c.offset.initialize();var d=b.offsetParent,f=b,e=b.ownerDocument,j,i=e.documentElement,o=e.body;f=(e=e.defaultView)?e.getComputedStyle(b,null):b.currentStyle;for(var k=b.offsetTop,n=b.offsetLeft;(b=b.parentNode)&&b!==o&&b!==i;){if(c.offset.supportsFixedPosition&&f.position==="fixed")break;j=e?e.getComputedStyle(b,null):b.currentStyle;
+k-=b.scrollTop;n-=b.scrollLeft;if(b===d){k+=b.offsetTop;n+=b.offsetLeft;if(c.offset.doesNotAddBorder&&!(c.offset.doesAddBorderForTableAndCells&&/^t(able|d|h)$/i.test(b.nodeName))){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=d;d=b.offsetParent}if(c.offset.subtractsBorderForOverflowNotVisible&&j.overflow!=="visible"){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=j}if(f.position==="relative"||f.position==="static"){k+=o.offsetTop;n+=o.offsetLeft}if(c.offset.supportsFixedPosition&&
+f.position==="fixed"){k+=Math.max(i.scrollTop,o.scrollTop);n+=Math.max(i.scrollLeft,o.scrollLeft)}return{top:k,left:n}};c.offset={initialize:function(){var a=s.body,b=s.createElement("div"),d,f,e,j=parseFloat(c.curCSS(a,"marginTop",true))||0;c.extend(b.style,{position:"absolute",top:0,left:0,margin:0,border:0,width:"1px",height:"1px",visibility:"hidden"});b.innerHTML="<div style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;'><div></div></div><table style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;' cellpadding='0' cellspacing='0'><tr><td></td></tr></table>";
+a.insertBefore(b,a.firstChild);d=b.firstChild;f=d.firstChild;e=d.nextSibling.firstChild.firstChild;this.doesNotAddBorder=f.offsetTop!==5;this.doesAddBorderForTableAndCells=e.offsetTop===5;f.style.position="fixed";f.style.top="20px";this.supportsFixedPosition=f.offsetTop===20||f.offsetTop===15;f.style.position=f.style.top="";d.style.overflow="hidden";d.style.position="relative";this.subtractsBorderForOverflowNotVisible=f.offsetTop===-5;this.doesNotIncludeMarginInBodyOffset=a.offsetTop!==j;a.removeChild(b);
+c.offset.initialize=c.noop},bodyOffset:function(a){var b=a.offsetTop,d=a.offsetLeft;c.offset.initialize();if(c.offset.doesNotIncludeMarginInBodyOffset){b+=parseFloat(c.curCSS(a,"marginTop",true))||0;d+=parseFloat(c.curCSS(a,"marginLeft",true))||0}return{top:b,left:d}},setOffset:function(a,b,d){if(/static/.test(c.curCSS(a,"position")))a.style.position="relative";var f=c(a),e=f.offset(),j=parseInt(c.curCSS(a,"top",true),10)||0,i=parseInt(c.curCSS(a,"left",true),10)||0;if(c.isFunction(b))b=b.call(a,
+d,e);d={top:b.top-e.top+j,left:b.left-e.left+i};"using"in b?b.using.call(a,d):f.css(d)}};c.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),d=this.offset(),f=/^body|html$/i.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.curCSS(a,"marginTop",true))||0;d.left-=parseFloat(c.curCSS(a,"marginLeft",true))||0;f.top+=parseFloat(c.curCSS(b[0],"borderTopWidth",true))||0;f.left+=parseFloat(c.curCSS(b[0],"borderLeftWidth",true))||0;return{top:d.top-
+f.top,left:d.left-f.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||s.body;a&&!/^body|html$/i.test(a.nodeName)&&c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(f){var e=this[0],j;if(!e)return null;if(f!==w)return this.each(function(){if(j=wa(this))j.scrollTo(!a?f:c(j).scrollLeft(),a?f:c(j).scrollTop());else this[d]=f});else return(j=wa(e))?"pageXOffset"in j?j[a?"pageYOffset":
+"pageXOffset"]:c.support.boxModel&&j.document.documentElement[d]||j.document.body[d]:e[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase();c.fn["inner"+b]=function(){return this[0]?c.css(this[0],d,false,"padding"):null};c.fn["outer"+b]=function(f){return this[0]?c.css(this[0],d,false,f?"margin":"border"):null};c.fn[d]=function(f){var e=this[0];if(!e)return f==null?null:this;if(c.isFunction(f))return this.each(function(j){var i=c(this);i[d](f.call(this,j,i[d]()))});return"scrollTo"in
+e&&e.document?e.document.compatMode==="CSS1Compat"&&e.document.documentElement["client"+b]||e.document.body["client"+b]:e.nodeType===9?Math.max(e.documentElement["client"+b],e.body["scroll"+b],e.documentElement["scroll"+b],e.body["offset"+b],e.documentElement["offset"+b]):f===w?c.css(e,d):this.css(d,typeof f==="string"?f:f+"px")}});A.jQuery=A.$=c})(window);
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png b/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png
new file mode 100644
index 0000000000..4625f9df74
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd b/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd
new file mode 100644
index 0000000000..3764f82ccb
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js
new file mode 100644
index 0000000000..4417f5b438
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js
@@ -0,0 +1,71 @@
+// © 2010 EPFL/LAMP
+// code by Gilles Dubochet
+
+function Scheduler() {
+ var scheduler = this;
+ var resolution = 0;
+ this.timeout = undefined;
+ this.queues = new Array(0); // an array of work packages, indexed in parallel with the labels table.
+ this.labels = new Array(0); // an array of labels ordered by priority. This should be short.
+ this.label = function(name, priority) {
+ this.name = name;
+ this.priority = priority;
+ }
+ this.work = function(fn, self, args) {
+ this.fn = fn;
+ this.self = self;
+ this.args = args;
+ }
+ this.addLabel = function(name, priority) {
+ var idx = 0;
+ while (idx < scheduler.queues.length && scheduler.labels[idx].priority <= priority) { idx = idx + 1; }
+ scheduler.labels.splice(idx, 0, new scheduler.label(name, priority));
+ scheduler.queues.splice(idx, 0, new Array(0));
+ }
+ this.clearLabel = function(name) {
+ var idx = 0;
+ while (idx < scheduler.queues.length && scheduler.labels[idx].name != name) { idx = idx + 1; }
+ if (idx < scheduler.queues.length && scheduler.labels[idx].name == name) {
+ scheduler.labels.splice(idx, 1);
+ scheduler.queues.splice(idx, 1);
+ }
+ }
+ this.nextWork = function() {
+ var fn = undefined;
+ var idx = 0;
+ while (idx < scheduler.queues.length && scheduler.queues[idx].length == 0) { idx = idx + 1; }
+ if (idx < scheduler.queues.length && scheduler.queues[idx].length > 0) {
+ fn = scheduler.queues[idx].shift();
+ }
+ return fn;
+ }
+ this.add = function(labelName, fn, self, args) {
+ var doWork = function() {
+ scheduler.timeout = setTimeout(function() {
+ var work = scheduler.nextWork();
+ if (work != undefined) {
+ if (work.args == undefined) { work.args = new Array(0); }
+ work.fn.apply(work.self, work.args);
+ doWork();
+ }
+ else {
+ scheduler.timeout = undefined;
+ }
+ }, resolution);
+ }
+ var idx = 0;
+ while (idx < scheduler.labels.length && scheduler.labels[idx].name != labelName) { idx = idx + 1; }
+ if (idx < scheduler.queues.length && scheduler.labels[idx].name == labelName) {
+ scheduler.queues[idx].push(new scheduler.work(fn, self, args));
+ if (scheduler.timeout == undefined) doWork();
+ }
+ else throw("queue for add is non-existent");
+ }
+ this.clear = function(labelName) {
+ var idx = 0;
+ while (idx < scheduler.labels.length && scheduler.labels[idx].name != labelName) { idx = idx + 1; }
+ if (idx < scheduler.queues.length && scheduler.labels[idx].name == labelName) {
+ scheduler.queues[idx] = new Array();
+ }
+ }
+};
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
index 517c767241..a23c8b6402 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
@@ -92,6 +92,7 @@ a:hover {
div.members > ol {
background-color: white;
+ list-style: none
}
div.members > ol > li {
@@ -100,6 +101,15 @@ div.members > ol > li {
/* Member signatures */
+#tooltip {
+ background: #142556;
+ border: 2px solid white;
+ color: white;
+ display: none;
+ padding: 5px;
+ position: absolute;
+}
+
.signature {
padding: 1px;
font-family: monospace;
@@ -112,12 +122,13 @@ div.members > ol > li {
.signature .kind {
text-align: right;
float: left;
+ display: inline-block;
width: 72px;
}
.signature .symbol {
- display: block;
- margin-left: 80px;
+ display: inline;
+ padding-left: 8px;
}
.signature .name {
@@ -125,12 +136,24 @@ div.members > ol > li {
font-weight: bold;
}
+.signature .symbol .params .implicit {
+ font-style: italic;
+}
+
+.signature .symbol .name.deprecated {
+ text-decoration: line-through;
+}
+
+.signature .symbol .params .default {
+ font-style: italic;
+}
+
#values .signature .name {
- color: #142556;
+ color: blue;
}
#types .signature .name {
- color: #561414;
+ color: red;
}
/* Comments text formating */
@@ -153,6 +176,45 @@ div.members > ol > li {
margin: 2px 0 2px 0;
}
+.cmt ul {
+ display: block;
+ list-style: circle;
+ padding-left:20px;
+}
+
+.cmt ol {
+ display: block;
+ padding-left:20px;
+}
+
+.cmt ol.decimal {
+ list-style: decimal;
+}
+
+.cmt ol.lowerAlpha {
+ list-style: lower-alpha;
+}
+
+.cmt ol.upperAlpha {
+ list-style: upper-alpha;
+}
+
+.cmt ol.lowerRoman {
+ list-style: lower-roman;
+}
+
+.cmt ol.upperRoman {
+ list-style: upper-roman;
+}
+
+.cmt li {
+ display:list-item;
+}
+
+.cmt a {
+ text-decoration: underline;
+}
+
/* Comments structured layout */
p.comment {
@@ -181,6 +243,11 @@ div.fullcomment .block {
border-bottom: 1px solid black;
}
+div.fullcomment div.block ol li p,
+div.fullcomment div.block ol li {
+ display:inline
+}
+
div.fullcomment .block + .block {
border-top: none;
}
@@ -231,16 +298,19 @@ div.fullcomment dl.paramcmts > dd + dt + dd {
#mbrsel > div > h3 {
padding: 4px;
- display: block;
- float: left;
+ display: inline;
}
#mbrsel > div > ol {
- margin-left: 80px;
+ display: inline-block;
+ background-color: white;
+}
+
+#mbrsel > div > ol#linearization {
+ display: inline;
}
#mbrsel > div > ol > li {
- display: block;
padding: 4px 8px 4px 8px;
font-weight: bold;
background-color: white;
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
index 2f5efb1ede..47de01f6e2 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
@@ -1,3 +1,6 @@
+// © 2009–2010 EPFL/LAMP
+// code by Gilles Dubochet with contributions by Pedro Furlanetto
+
$(document).ready(function(){
var prefilters = $("#ancestors > ol > li").filter(function(){
var name = $(this).attr("name");
@@ -5,7 +8,7 @@ $(document).ready(function(){
});
prefilters.removeClass("in");
prefilters.addClass("out");
- filterInherit();
+ filter();
$("#ancestors > ol > li").click(function(){
if ($(this).hasClass("in")) {
$(this).removeClass("in");
@@ -15,22 +18,43 @@ $(document).ready(function(){
$(this).removeClass("out");
$(this).addClass("in");
};
- filterInherit();
+ filter();
+ });
+ $("#ancestors > ol > li.hideall").click(function() {
+ $("#ancestors > ol > li.in").removeClass("in").addClass("out");
+ filter();
+ })
+ $("#ancestors > ol > li.showall").click(function() {
+ var filtered =
+ $("#ancestors > ol > li.out").filter(function() {
+ var name = $(this).attr("name");
+ return !(name == "scala.Any" || name == "scala.AnyRef");
+ });
+ filtered.removeClass("out").addClass("in");
+ filter();
+ });
+ $("#visbl > ol > li.public").click(function() {
+ if ($(this).hasClass("out")) {
+ $(this).removeClass("out").addClass("in");
+ $("#visbl > ol > li.all").removeClass("in").addClass("out");
+ filter();
+ };
+ })
+ $("#visbl > ol > li.all").click(function() {
+ if ($(this).hasClass("out")) {
+ $(this).removeClass("out").addClass("in");
+ $("#visbl > ol > li.public").removeClass("in").addClass("out");
+ filter();
+ };
+ });
+ //http://flowplayer.org/tools/tooltip.html
+ $(".extype").tooltip({
+ tip: "#tooltip",
+ position:"top center",
+ onBeforeShow: function(ev) {
+ $(this.getTip()).text(this.getTrigger().attr("name"));
+ }
});
- $(".signature .symbol .extype").hover(
- function(){
- var full = $(this).attr("name");
- var short = $(this).text();
- $(this).attr("name", short);
- $(this).text(full);
- },
- function(){
- var short = $(this).attr("name");
- var full = $(this).text();
- $(this).attr("name", full);
- $(this).text(short);
- }
- );
$("#template div.fullcomment").hide();
var docAllSigs = $("#template .signature");
function commentShowFct(fullComment){
@@ -73,28 +97,33 @@ $(document).ready(function(){
});
});
-function filterInherit() {
- $("#mbrsel > div > ol > li.in").each(function(){
- findMembersByOwner($(this).attr("name")).show();
- });
- $("#mbrsel > div > ol > li.out").each(function(){
- findMembersByOwner($(this).attr("name")).hide();
- });
- return false;
-};
-
-function findMembersByOwner(owner0) {
- return $(".members > ol > li").filter(function(){
+function filter() {
+ var outOwners =
+ $("#mbrsel > div > ol > li.out").map(function(){
+ return $(this).attr("name");
+ }).get();
+ var prtVisbl = $("#visbl > ol > li.all").hasClass("in");
+ $(".members > ol > li").each(function(){
+ var vis1 = $(this).attr("visbl");
var qualName1 = $(this).attr("name");
- if (qualName1 == undefined) return false;
- return owner0 == qualName1.slice(0, qualName1.indexOf("#"));
+ var owner1 = qualName1.slice(0, qualName1.indexOf("#"));
+ //var name1 = qualName1.slice(qualName1.indexOf("#") + 1);
+ var showByOwned = true;
+ for (var out in outOwners) {
+ if (outOwners[out] == owner1) {
+ showByOwned = false;
+ };
+ };
+ var showByVis = true
+ if (vis1 == "prt") {
+ showByVis = prtVisbl;
+ };
+ if (showByOwned && showByVis) {
+ $(this).show();
+ }
+ else {
+ $(this).hide();
+ };
});
+ return false
};
-
-function findMemberByName(name0) {
- return $(".members > ol > li").filter(function(){
- var qualName1 = $(this).attr("name");
- if (qualName1 == undefined) return false;
- return name0 == qualName1.slice(qualName1.indexOf("#") + 1);
- }).eq(0);
-}; \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js
new file mode 100644
index 0000000000..0af34eca4c
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js
@@ -0,0 +1,14 @@
+/*
+ * tools.tooltip 1.1.3 - Tooltips done right.
+ *
+ * Copyright (c) 2009 Tero Piirainen
+ * http://flowplayer.org/tools/tooltip.html
+ *
+ * Dual licensed under MIT and GPL 2+ licenses
+ * http://www.opensource.org/licenses
+ *
+ * Launch : November 2008
+ * Date: ${date}
+ * Revision: ${revision}
+ */
+(function(c){var d=[];c.tools=c.tools||{};c.tools.tooltip={version:"1.1.3",conf:{effect:"toggle",fadeOutSpeed:"fast",tip:null,predelay:0,delay:30,opacity:1,lazy:undefined,position:["top","center"],offset:[0,0],cancelDefault:true,relative:false,oneInstance:true,events:{def:"mouseover,mouseout",input:"focus,blur",widget:"focus mouseover,blur mouseout",tooltip:"mouseover,mouseout"},api:false},addEffect:function(e,g,f){b[e]=[g,f]}};var b={toggle:[function(e){var f=this.getConf(),g=this.getTip(),h=f.opacity;if(h<1){g.css({opacity:h})}g.show();e.call()},function(e){this.getTip().hide();e.call()}],fade:[function(e){this.getTip().fadeIn(this.getConf().fadeInSpeed,e)},function(e){this.getTip().fadeOut(this.getConf().fadeOutSpeed,e)}]};function a(f,g){var p=this,k=c(this);f.data("tooltip",p);var l=f.next();if(g.tip){l=c(g.tip);if(l.length>1){l=f.nextAll(g.tip).eq(0);if(!l.length){l=f.parent().nextAll(g.tip).eq(0)}}}function o(u){var t=g.relative?f.position().top:f.offset().top,s=g.relative?f.position().left:f.offset().left,v=g.position[0];t-=l.outerHeight()-g.offset[0];s+=f.outerWidth()+g.offset[1];var q=l.outerHeight()+f.outerHeight();if(v=="center"){t+=q/2}if(v=="bottom"){t+=q}v=g.position[1];var r=l.outerWidth()+f.outerWidth();if(v=="center"){s-=r/2}if(v=="left"){s-=r}return{top:t,left:s}}var i=f.is(":input"),e=i&&f.is(":checkbox, :radio, select, :button"),h=f.attr("type"),n=g.events[h]||g.events[i?(e?"widget":"input"):"def"];n=n.split(/,\s*/);if(n.length!=2){throw"Tooltip: bad events configuration for "+h}f.bind(n[0],function(r){if(g.oneInstance){c.each(d,function(){this.hide()})}var q=l.data("trigger");if(q&&q[0]!=this){l.hide().stop(true,true)}r.target=this;p.show(r);n=g.events.tooltip.split(/,\s*/);l.bind(n[0],function(){p.show(r)});if(n[1]){l.bind(n[1],function(){p.hide(r)})}});f.bind(n[1],function(q){p.hide(q)});if(!c.browser.msie&&!i&&!g.predelay){f.mousemove(function(){if(!p.isShown()){f.triggerHandler("mouseover")}})}if(g.opacity<1){l.css("opacity",g.opacity)}var m=0,j=f.attr("title");if(j&&g.cancelDefault){f.removeAttr("title");f.data("title",j)}c.extend(p,{show:function(r){if(r){f=c(r.target)}clearTimeout(l.data("timer"));if(l.is(":animated")||l.is(":visible")){return p}function q(){l.data("trigger",f);var t=o(r);if(g.tip&&j){l.html(f.data("title"))}r=r||c.Event();r.type="onBeforeShow";k.trigger(r,[t]);if(r.isDefaultPrevented()){return p}t=o(r);l.css({position:"absolute",top:t.top,left:t.left});var s=b[g.effect];if(!s){throw'Nonexistent effect "'+g.effect+'"'}s[0].call(p,function(){r.type="onShow";k.trigger(r)})}if(g.predelay){clearTimeout(m);m=setTimeout(q,g.predelay)}else{q()}return p},hide:function(r){clearTimeout(l.data("timer"));clearTimeout(m);if(!l.is(":visible")){return}function q(){r=r||c.Event();r.type="onBeforeHide";k.trigger(r);if(r.isDefaultPrevented()){return}b[g.effect][1].call(p,function(){r.type="onHide";k.trigger(r)})}if(g.delay&&r){l.data("timer",setTimeout(q,g.delay))}else{q()}return p},isShown:function(){return l.is(":visible, :animated")},getConf:function(){return g},getTip:function(){return l},getTrigger:function(){return f},bind:function(q,r){k.bind(q,r);return p},onHide:function(q){return this.bind("onHide",q)},onBeforeShow:function(q){return this.bind("onBeforeShow",q)},onShow:function(q){return this.bind("onShow",q)},onBeforeHide:function(q){return this.bind("onBeforeHide",q)},unbind:function(q){k.unbind(q);return p}});c.each(g,function(q,r){if(c.isFunction(r)){p.bind(q,r)}})}c.prototype.tooltip=function(e){var f=this.eq(typeof 
e=="number"?e:0).data("tooltip");if(f){return f}var g=c.extend(true,{},c.tools.tooltip.conf);if(c.isFunction(e)){e={onBeforeShow:e}}else{if(typeof e=="string"){e={tip:e}}}e=c.extend(true,g,e);if(typeof e.position=="string"){e.position=e.position.split(/,?\s/)}if(e.lazy!==false&&(e.lazy===true||this.length>20)){this.one("mouseover",function(h){f=new a(c(this),e);f.show(h);d.push(f)})}else{this.each(function(){f=new a(c(this),e);d.push(f)})}return e.api?f:this}})(jQuery); \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt b/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt
index a5056fc9dd..17d1caeb66 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt
@@ -1 +1 @@
-jquery=1.3.2
+jquery=1.4.2
diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
index d7ef2b866d..7aa2c234ea 100644
--- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/Entity.scala
@@ -41,10 +41,10 @@ trait MemberEntity extends Entity {
def toRoot: List[MemberEntity]
def inDefinitionTemplates: List[TemplateEntity]
def definitionName: String
- def visibility: Option[Paragraph]
+ def visibility: Visibility
def flags: List[Paragraph]
+ def deprecation: Option[Body]
def inheritedFrom: List[TemplateEntity]
- def isDeprecated: Boolean
def resultType: TypeEntity
def isDef: Boolean
def isVal: Boolean
@@ -60,6 +60,7 @@ trait MemberEntity extends Entity {
trait DocTemplateEntity extends TemplateEntity with MemberEntity {
def toRoot: List[DocTemplateEntity]
def inSource: Option[(io.AbstractFile, Int)]
+ def sourceUrl: Option[java.net.URL]
def typeParams: List[TypeParam]
def parentType: Option[TypeEntity]
def linearization: List[TemplateEntity]
@@ -71,6 +72,21 @@ trait DocTemplateEntity extends TemplateEntity with MemberEntity {
def abstractTypes: List[AbstractType]
def aliasTypes: List[AliasType]
def companion: Option[DocTemplateEntity]
+ // temporary implementation: to be removed
+ def findMember(str: String): Option[DocTemplateEntity] = {
+ val root = toRoot.last
+ val path = if (str.length > 0) str.split("\\.") else Array[String]()
+ var i = 0;
+ var found: DocTemplateEntity = root
+ while(i < path.length && found != null) {
+ found = found.members.find(_.name == path(i)) match {
+ case Some(doc:DocTemplateEntity) => doc
+ case _ => null
+ }
+ i += 1
+ }
+ Option(found)
+ }
}
/** A ''documentable'' trait. */
@@ -95,6 +111,9 @@ trait Package extends Object {
def packages: List[Package]
}
+/** A package representing the root of the entity hierarchy. */
+trait RootPackage extends Package
+
trait NonTemplateMemberEntity extends MemberEntity {
def isUseCase: Boolean
}
@@ -141,5 +160,38 @@ trait TypeParam extends ParameterEntity {
/** A value parameter to a constructor or to a method. */
trait ValueParam extends ParameterEntity {
- def resultType : TypeEntity
+ def resultType: TypeEntity
+ def defaultValue: Option[String]
+ def isImplicit: Boolean
+}
+
+/** A type that represents the visibility of members. */
+sealed trait Visibility {
+ def isProtected: Boolean = false
+ def isPublic: Boolean = false
+}
+
+/** The visibility of `private[this]` members. */
+case class PrivateInInstance() extends Visibility
+
+/** The visibility of `protected[this]` members. */
+case class ProtectedInInstance() extends Visibility {
+ override def isProtected = true
+}
+
+/** The visibility of `private[owner]` members. An unqualified private member is encoded with `owner` equal to the
+ * member's `inTemplate`. */
+case class PrivateInTemplate(owner: TemplateEntity) extends Visibility
+
+/** The visibility of `protected[owner]` members. An unqualified protected member is encoded with `owner` equal to the
+ * member's `inTemplate`.
+ * Note that whilst the member is visible in any template owned by `owner`, it is only visible in subclasses of the
+ * member's `inTemplate`. */
+case class ProtectedInTemplate(owner: TemplateEntity) extends Visibility {
+ override def isProtected = true
+}
+
+/** The visibility of public members. */
+case class Public() extends Visibility {
+ override def isPublic = true
}
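
Note: the sealed Visibility hierarchy above replaces the former Option[Paragraph] visibility field on MemberEntity. The snippet below is a minimal, illustrative sketch (not part of this patch) of how a Scaladoc renderer might consume it; it assumes only the entity types declared in Entity.scala above and would need to live inside scala.tools.nsc.doc.model for those types to be in scope. A real renderer would also special-case an owner equal to the member's inTemplate to print an unqualified private/protected.

    // Illustrative sketch only -- not part of this commit.
    // Assumes the Visibility case classes and MemberEntity.visibility added above.
    def visibilityString(mbr: MemberEntity): String = mbr.visibility match {
      case PrivateInInstance()        => "private[this]"
      case ProtectedInInstance()      => "protected[this]"
      case PrivateInTemplate(owner)   => "private[" + owner.name + "]"
      case ProtectedInTemplate(owner) => "protected[" + owner.name + "]"
      case Public()                   => ""
    }

    // The temporary findMember helper on DocTemplateEntity resolves a
    // dot-separated path starting from the root package, e.g. (hypothetical call):
    //   rootPackage.findMember("scala.collection.Seq")   // Some(...) or None
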
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index f935dd4478..3968d3483c 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -9,51 +9,37 @@ import comment._
import scala.collection._
import symtab.Flags
-import util.Position
+
+import model.{ RootPackage => RootPackageEntity }
/** This trait extracts all required information for documentation from compilation units */
-class ModelFactory(val global: Global, val settings: doc.Settings) { extractor =>
+class ModelFactory(val global: Global, val settings: doc.Settings) extends CommentFactory { thisFactory =>
import global._
- import definitions.{ ObjectClass, ScalaObjectClass, RootPackage, EmptyPackage }
+ import definitions.{ ObjectClass, ScalaObjectClass, RootPackage, EmptyPackage, NothingClass, AnyClass, AnyRefClass }
private var droppedPackages = 0
def templatesCount = templatesCache.size - droppedPackages
- /** */
- def makeModel: Package =
- makePackage(RootPackage, null) getOrElse { throw new Error("no documentable class found in compilation units") }
-
- object commentator {
-
- private val factory = new CommentFactory(reporter)
-
- private val commentCache = mutable.HashMap.empty[(Symbol, TemplateImpl), Comment]
-
- def registeredUseCase(sym: Symbol, inTpl: => TemplateImpl, docStr: String, docPos: Position): Symbol = {
- commentCache += (sym, inTpl) -> factory.parse(docStr, docPos)
- sym
- }
-
- def comment(sym: Symbol, inTpl: => DocTemplateImpl): Option[Comment] = {
- val key = (sym, inTpl)
- if (commentCache isDefinedAt key)
- Some(commentCache(key))
- else { // not reached for use-case comments
- val rawComment = expandedDocComment(sym, inTpl.sym)
- if (rawComment == "") None else {
- val c = factory.parse(rawComment, docCommentPos(sym))
- commentCache += (sym, inTpl) -> c
- Some(c)
- }
- }
- }
+ private var modelFinished = false
+ /** */
+ def makeModel: Universe = {
+ val rootPackage =
+ makeRootPackage getOrElse { throw new Error("no documentable class found in compilation units") }
+ val universe = new Universe(settings, rootPackage)
+ modelFinished = true
+ universe
}
/** */
protected val templatesCache =
- new mutable.LinkedHashMap[(Symbol, TemplateImpl), DocTemplateImpl]
+ new mutable.LinkedHashMap[Symbol, DocTemplateImpl]
+
+ def findTemplate(query: String): Option[DocTemplateImpl] = {
+ if (!modelFinished) throw new Error("cannot find template in unfinished universe")
+ templatesCache.values find { tpl => tpl.qualifiedName == query && !tpl.isObject }
+ }
def optimize(str: String): String =
if (str.length < 16) str.intern else str
@@ -88,44 +74,47 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { extractor =
/** Provides a default implementation for instances of the `MemberEntity` type. It must be instantiated as a
* `SymbolicEntity` to access the compiler symbol that underlies the entity. */
abstract class MemberImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity {
- val comment =
- if (inTpl == null) None else commentator.comment(sym, inTpl)
+ lazy val comment =
+ if (inTpl == null) None else thisFactory.comment(sym, inTpl)
override def inTemplate = inTpl
override def toRoot: List[MemberImpl] = this :: inTpl.toRoot
def inDefinitionTemplates =
if (inTpl == null)
- makePackage(RootPackage, null).toList
- else if (sym.owner == inTpl.sym)
- inTpl :: Nil
+ makeRootPackage.toList
else
makeTemplate(sym.owner) :: (sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) })
def visibility = {
- def qual = {
- val qq =
- if (sym hasFlag Flags.LOCAL)
- Some("this")
- else if (sym.privateWithin != null && sym.privateWithin != NoSymbol)
- Some(sym.privateWithin.nameString) // TODO: create an inline link to the qualifier entity
+ if (sym.isPrivateLocal) PrivateInInstance()
+ else if (sym.isProtectedLocal) ProtectedInInstance()
+ else {
+ val qual =
+ if (sym.privateWithin != null && sym.privateWithin != NoSymbol)
+ Some(makeTemplate(sym.privateWithin))
else None
- qq match { case Some(q) => "[" + q + "]" case None => "" }
+ if (sym.isPrivate) PrivateInTemplate(inTpl)
+ else if (sym.isProtected) ProtectedInTemplate(qual getOrElse inTpl)
+ else if (qual.isDefined) PrivateInTemplate(qual.get)
+ else Public()
}
- if (sym hasFlag Flags.PRIVATE) Some(Paragraph(Text(optimize("private" + qual))))
- else if (sym hasFlag Flags.PROTECTED) Some(Paragraph(Text(optimize("protected" + qual))))
- else None
}
def flags = {
val fgs = mutable.ListBuffer.empty[Paragraph]
- if (sym hasFlag Flags.IMPLICIT) fgs += Paragraph(Text("implicit"))
+ if (sym.isImplicit) fgs += Paragraph(Text("implicit"))
if (sym hasFlag Flags.SEALED) fgs += Paragraph(Text("sealed"))
if (!sym.isTrait && (sym hasFlag Flags.ABSTRACT)) fgs += Paragraph(Text("abstract"))
if (!sym.isTrait && (sym hasFlag Flags.DEFERRED)) fgs += Paragraph(Text("abstract"))
if (!sym.isModule && (sym hasFlag Flags.FINAL)) fgs += Paragraph(Text("final"))
fgs.toList
}
+ def deprecation =
+ if (sym.isDeprecated)
+ Some(sym.deprecationMessage map (x => parseWiki(x, NoPosition)) getOrElse Body(Nil))
+ else
+ comment flatMap (_.deprecated)
+
def inheritedFrom =
if (inTemplate.sym == this.sym.owner || inTemplate.sym.isPackage) Nil else
makeTemplate(this.sym.owner) :: (sym.allOverriddenSymbols map { os => makeTemplate(os.owner) })
- def isDeprecated = sym.isDeprecated
def resultType = makeType(sym.tpe.finalResultType, inTemplate, sym)
def isDef = false
def isVal = false
@@ -146,10 +135,22 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { extractor =
* * All non-package members (including other templates, as full templates). */
abstract class DocTemplateImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with DocTemplateEntity {
//if (inTpl != null) println("mbr " + sym + " in " + (inTpl.toRoot map (_.sym)).mkString(" > "))
- templatesCache += ((sym, inTpl) -> this)
- override def definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." + name)
+ templatesCache += (sym -> this)
+ lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." + name)
override def toRoot: List[DocTemplateImpl] = this :: inTpl.toRoot
def inSource = if (sym.sourceFile != null) Some(sym.sourceFile, sym.pos.line) else None
+ def sourceUrl = {
+ def fixPath(s: String) = s.replaceAll("\\" + java.io.File.separator, "/")
+ val assumedSourceRoot: String = {
+ val fixed = fixPath(settings.sourcepath.value)
+ if (fixed endsWith "/") fixed.dropRight(1) else fixed
+ }
+ if (!settings.docsourceurl.isDefault)
+ inSource map { case (file, _) =>
+ new java.net.URL(settings.docsourceurl.value + "/" + fixPath(file.path).replaceFirst("^" + assumedSourceRoot, ""))
+ }
+ else None
+ }
def typeParams = if (sym.isClass) sym.typeParams map (makeTypeParam(_, this)) else Nil
def parentType =
if (sym.isPackage) None else
@@ -161,7 +162,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { extractor =
case _ =>
}
}
- sym.ancestors filter (_ != ScalaObjectClass) map (makeTemplate(_))
+ sym.ancestors filter (_ != ScalaObjectClass) map makeTemplate
}
private lazy val subClassesCache = mutable.Buffer.empty[DocTemplateEntity]
def registerSubClass(sc: DocTemplateEntity) = {
@@ -169,16 +170,18 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { extractor =
subClassesCache += sc
}
def subClasses = subClassesCache.toList
- protected def memberSyms = sym.info.nonPrivateMembers
- val members: List[MemberEntity] = memberSyms flatMap (makeMember(_, this))
- val templates = members partialMap { case c: DocTemplateEntity => c }
- val methods = members partialMap { case d: Def => d }
- val values = members partialMap { case v: Val => v }
- val abstractTypes = members partialMap { case t: AbstractType => t }
- val aliasTypes = members partialMap { case t: AliasType => t }
+ protected lazy val memberSyms =
+ // Only this class's constructors are part of its members, inherited constructors are not.
+ sym.info.members.filter(s => localShouldDocument(s) && (!s.isConstructor || s.owner == sym))
+ val members = memberSyms flatMap (makeMember(_, this))
+ val templates = members collect { case c: DocTemplateEntity => c }
+ val methods = members collect { case d: Def => d }
+ val values = members collect { case v: Val => v }
+ val abstractTypes = members collect { case t: AbstractType => t }
+ val aliasTypes = members collect { case t: AliasType => t }
override def isTemplate = true
def isDocTemplate = true
- def companion = sym.linkedSym match {
+ def companion = sym.companionSymbol match {
case NoSymbol => None
case comSym => Some(makeDocTemplate(comSym, inTpl))
}
@@ -187,49 +190,61 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { extractor =
abstract class PackageImpl(sym: Symbol, inTpl: => PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
override def inTemplate = inTpl
override def toRoot: List[PackageImpl] = this :: inTpl.toRoot
- val packages = members partialMap { case p: Package => p }
+ val packages = members collect { case p: Package => p }
}
+ abstract class RootPackageImpl(sym: Symbol) extends PackageImpl(sym, null) with RootPackageEntity
+
abstract class NonTemplateMemberImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends MemberImpl(sym, inTpl) with NonTemplateMemberEntity {
override def qualifiedName = optimize(inTemplate.qualifiedName + "#" + name)
- override def definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "#" + name)
+ lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "#" + name)
}
abstract class ParameterImpl(sym: Symbol, inTpl: => DocTemplateImpl) extends EntityImpl(sym, inTpl) with ParameterEntity {
override def inTemplate = inTpl
}
+ private trait StdTypeBounds extends EntityImpl {
+ def lo = sym.info.normalize match {
+ case TypeBounds(lo, hi) if lo.typeSymbol != NothingClass => Some(makeType(lo, inTemplate, sym))
+ case _ => None
+ }
+ def hi = sym.info.normalize match {
+ case TypeBounds(lo, hi) if hi.typeSymbol != AnyClass => Some(makeType(hi, inTemplate, sym))
+ case _ => None
+ }
+ }
+
/* ============== MAKER METHODS ============== */
/** */
- def normalizeTemplate(aSym: Symbol): Symbol = {
- if (aSym == null || aSym == EmptyPackage || aSym == NoSymbol)
- normalizeTemplate(RootPackage)
- else if (aSym == ScalaObjectClass || aSym == ObjectClass)
- normalizeTemplate(definitions.AnyRefClass)
- else if (aSym.isModuleClass || aSym.isPackageObject)
- normalizeTemplate(aSym.sourceModule)
- else
- aSym
+ def normalizeTemplate(aSym: Symbol): Symbol = aSym match {
+ case null | EmptyPackage | NoSymbol => normalizeTemplate(RootPackage)
+ case ScalaObjectClass | ObjectClass => normalizeTemplate(AnyRefClass)
+ case _ if aSym.isModuleClass || aSym.isPackageObject => normalizeTemplate(aSym.sourceModule)
+ case _ => aSym
}
+ def makeRootPackage: Option[PackageImpl] =
+ makePackage(RootPackage, null)
+
/** Creates a package entity for the given symbol or returns `None` if the symbol does not denote a package that
* contains at least one ''documentable'' class, trait or object. Creating a package entity */
def makePackage(aSym: Symbol, inTpl: => PackageImpl): Option[PackageImpl] = {
val bSym = normalizeTemplate(aSym)
- if (templatesCache isDefinedAt (bSym, inTpl))
- Some(templatesCache(bSym, inTpl) match {case p: PackageImpl => p})
+ if (templatesCache isDefinedAt (bSym))
+ Some(templatesCache(bSym) match {case p: PackageImpl => p})
else {
val pack =
if (bSym == RootPackage)
- new PackageImpl(bSym, null) {
+ new RootPackageImpl(bSym) {
override val name = "root"
override def inTemplate = this
override def toRoot = this :: Nil
override def qualifiedName = "_root_"
override def inheritedFrom = Nil
override def isRootPackage = true
- override protected def memberSyms =
+ override protected lazy val memberSyms =
(bSym.info.members ++ EmptyPackage.info.members) filter { s =>
s != EmptyPackage && s != RootPackage
}
@@ -249,52 +264,48 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { extractor =
def makeTemplate(aSym: Symbol): TemplateImpl = {
val bSym = normalizeTemplate(aSym)
if (bSym == RootPackage)
- makePackage(bSym, null).get
- else
- makeTemplate(bSym, makeTemplate(bSym.owner))
- }
-
- /** */
- def makeTemplate(aSym: Symbol, inTpl: => TemplateImpl): TemplateImpl = {
- val bSym = normalizeTemplate(aSym)
- if (bSym.isPackage) inTpl match {
- case inPkg: PackageImpl => makePackage(bSym, inPkg) getOrElse (new NoDocTemplateImpl(bSym, inPkg))
- case _ => throw new Error("'" + bSym + "' must be in a package")
- }
- else if ((bSym.sourceFile != null) && bSym.isPublic && !bSym.isLocal) inTpl match {
- case inDTpl: DocTemplateImpl => makeDocTemplate(bSym, inDTpl)
- case _ => new NoDocTemplateImpl(bSym, inTpl) // The owner is private
- }
+ makeRootPackage.get
+ else if (bSym.isPackage)
+ makeTemplate(bSym.owner) match {
+ case inPkg: PackageImpl => makePackage(bSym, inPkg) getOrElse (new NoDocTemplateImpl(bSym, inPkg))
+ case _ => throw new Error("'" + bSym + "' must be in a package")
+ }
+ else if (templateShouldDocument(bSym))
+ makeTemplate(bSym.owner) match {
+ case inDTpl: DocTemplateImpl => makeDocTemplate(bSym, inDTpl)
+ case _ => throw new Error("'" + bSym + "' must be in a documentable template")
+ }
else
- new NoDocTemplateImpl(bSym, inTpl)
+ new NoDocTemplateImpl(bSym, makeTemplate(bSym.owner))
}
/** */
def makeDocTemplate(aSym: Symbol, inTpl: => DocTemplateImpl): DocTemplateImpl = {
val bSym = normalizeTemplate(aSym)
- val firstInTpl = { // to prevent a complexity explosion in some cases.
- def sInTpl0(inTpl: DocTemplateImpl): DocTemplateImpl =
- if ((aSym.owner != inTpl.inTemplate.sym) && (inTpl.inTemplate.sym.info.members contains aSym))
- sInTpl0(inTpl.inTemplate)
- else inTpl
- sInTpl0(inTpl)
- }
- if (templatesCache isDefinedAt (bSym, firstInTpl))
- templatesCache((bSym, firstInTpl))
+ val minimumInTpl =
+ if (bSym.owner != inTpl.sym)
+ makeTemplate(aSym.owner) match {
+ case inDTpl: DocTemplateImpl => inDTpl
+ case inNDTpl => throw new Error("'" + bSym + "' is owned by '" + inNDTpl + "' which is not documented")
+ }
+ else
+ inTpl
+ if (templatesCache isDefinedAt (bSym))
+ templatesCache(bSym)
else if (bSym.isModule || (bSym.isAliasType && bSym.tpe.typeSymbol.isModule))
- new DocTemplateImpl(bSym, firstInTpl) with Object
+ new DocTemplateImpl(bSym, minimumInTpl) with Object
else if (bSym.isTrait || (bSym.isAliasType && bSym.tpe.typeSymbol.isTrait))
- new DocTemplateImpl(bSym, firstInTpl) with Trait {
+ new DocTemplateImpl(bSym, minimumInTpl) with Trait {
def valueParams =
List(sym.constrParamAccessors map (makeValueParam(_, this)))
}
else if (bSym.isClass || (bSym.isAliasType && bSym.tpe.typeSymbol.isClass))
- new DocTemplateImpl(bSym, firstInTpl) with Class {
+ new DocTemplateImpl(bSym, minimumInTpl) with Class {
def valueParams =
List(sym.constrParamAccessors map (makeValueParam(_, this)))
val constructors =
- members partialMap { case d: Constructor => d }
- def primaryConstructor = (constructors find (_.isPrimary))
+ members collect { case d: Constructor => d }
+ def primaryConstructor = constructors find (_.isPrimary)
def isCaseClass = sym.isClass && sym.hasFlag(Flags.CASE)
}
else
@@ -303,78 +314,69 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { extractor =
/** */
def makeMember(aSym: Symbol, inTpl: => DocTemplateImpl): List[MemberImpl] = {
+
def makeMember0(bSym: Symbol): Option[MemberImpl] = {
- if (bSym.isGetter && (bSym.accessed hasFlag Flags.MUTABLE))
- Some(new NonTemplateMemberImpl(bSym, inTpl) with Val {
+ abstract class MakeMemberTemplateImpl extends NonTemplateMemberImpl(bSym, inTpl) {
+ def isUseCase = bSym.isSynthetic
+ }
+ trait MethodParams extends MakeMemberTemplateImpl {
+ def valueParams =
+ sym.paramss map { ps =>
+ ps.zipWithIndex map { case (p, i) =>
+ if (p.nameString contains "$") makeValueParam(p, inTemplate, optimize("arg" + i))
+ else makeValueParam(p, inTemplate)
+ }
+ }
+ }
+
+ if (bSym.isGetter && bSym.accessed.isMutable)
+ Some(new MakeMemberTemplateImpl with Val {
override def isVar = true
- def isUseCase = bSym hasFlag Flags.SYNTHETIC
})
- else if (bSym.isMethod && !(bSym hasFlag Flags.ACCESSOR) && !bSym.isConstructor && !(bSym hasFlag Flags.FINAL))
- Some(new NonTemplateMemberImpl(bSym, inTpl) with Def {
+ else if (bSym.isMethod && !bSym.isGetterOrSetter && !bSym.isConstructor)
+ Some(new MakeMemberTemplateImpl with Def with MethodParams {
override def isDef = true
- def isUseCase = bSym hasFlag Flags.SYNTHETIC
def typeParams =
sym.tpe.typeParams map (makeTypeParam(_, inTpl))
- def valueParams =
- sym.paramss map { ps => (ps.zipWithIndex) map { case (p, i) =>
- if (p.nameString contains "$") makeValueParam(p, inTpl, optimize("arg" + i)) else makeValueParam(p, inTpl)
- }}
})
else if (bSym.isConstructor)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with Constructor {
+ Some(new MakeMemberTemplateImpl with Constructor with MethodParams {
override def isConstructor = true
- def isUseCase = bSym hasFlag Flags.SYNTHETIC
def isPrimary = sym.isPrimaryConstructor
- def valueParams =
- sym.paramss map { ps => (ps.zipWithIndex) map { case (p, i) =>
- if (p.nameString contains "$") makeValueParam(p, inTpl, optimize("arg" + i)) else makeValueParam(p, inTpl)
- }}
})
else if (bSym.isGetter) // Scala field accessor or Java field
- Some(new NonTemplateMemberImpl(bSym, inTpl) with Val {
+ Some(new MakeMemberTemplateImpl with Val {
override def isVal = true
- def isUseCase = bSym hasFlag Flags.SYNTHETIC
})
else if (bSym.isAbstractType)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with AbstractType {
+ Some(new MakeMemberTemplateImpl with AbstractType with StdTypeBounds {
override def isAbstractType = true
- def isUseCase = bSym hasFlag Flags.SYNTHETIC
- def lo = sym.info.normalize match {
- case TypeBounds(lo, hi) if lo.typeSymbol != definitions.NothingClass => Some(makeType(lo, inTpl, sym))
- case _ => None
- }
- def hi = sym.info.normalize match {
- case TypeBounds(lo, hi) if hi.typeSymbol != definitions.AnyClass => Some(makeType(hi, inTpl, sym))
- case _ => None
- }
})
else if (bSym.isAliasType)
- Some(new NonTemplateMemberImpl(bSym, inTpl) with AliasType {
+ Some(new MakeMemberTemplateImpl with AliasType {
override def isAliasType = true
- def isUseCase = bSym hasFlag Flags.SYNTHETIC
def alias = makeType(sym.tpe, inTpl, sym)
})
else if (bSym.isPackage)
inTpl match { case inPkg: PackageImpl => makePackage(bSym, inPkg) }
- else if ((bSym.isClass || bSym.isModule) && (bSym.sourceFile != null) && bSym.isPublic && !bSym.isLocal) {
- (inTpl.toRoot find (_.sym == bSym )) orElse Some(makeDocTemplate(bSym, inTpl))
- }
+ else if ((bSym.isClass || bSym.isModule) && templateShouldDocument(bSym))
+ Some(makeDocTemplate(bSym, inTpl))
else
None
}
- if (!aSym.isPublic || (aSym hasFlag Flags.SYNTHETIC) || (aSym hasFlag Flags.BRIDGE) || aSym.isLocal || aSym.isModuleClass || aSym.isPackageObject || aSym.isMixinConstructor)
+ if ((!aSym.isPackage && aSym.sourceFile == null) || !localShouldDocument(aSym) || aSym.isModuleClass || aSym.isPackageObject || aSym.isMixinConstructor)
Nil
else {
val allSyms = useCases(aSym, inTpl.sym) map { case (bSym, bComment, bPos) =>
- commentator.registeredUseCase(bSym, inTpl, bComment, bPos)
+ addCommentBody(bSym, inTpl, bComment, bPos)
}
- (allSyms ::: List(aSym)) flatMap (makeMember0(_))
+ (allSyms :+ aSym) flatMap makeMember0
}
}
/** */
def makeTypeParam(aSym: Symbol, inTpl: => DocTemplateImpl): TypeParam = {
- new ParameterImpl(aSym, inTpl) with TypeParam {
+ new ParameterImpl(aSym, inTpl) with TypeParam with StdTypeBounds {
def isTypeParam = true
def isValueParam = false
def variance: String = {
@@ -382,16 +384,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { extractor =
else if (sym hasFlag Flags.CONTRAVARIANT) "-"
else ""
}
- def lo = sym.info.normalize match {
- case TypeBounds(lo, hi) if lo.typeSymbol != definitions.NothingClass =>
- Some(makeType(lo, inTpl, sym))
- case _ => None
- }
- def hi = sym.info.normalize match {
- case TypeBounds(lo, hi) if hi.typeSymbol != definitions.AnyClass =>
- Some(makeType(hi, inTpl, sym))
- case _ => None
- }
}
}
@@ -401,16 +393,27 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { extractor =
}
/** */
- def makeValueParam(aSym: Symbol, inTpl: => DocTemplateImpl, newName: String): ValueParam = {
+ def makeValueParam(aSym: Symbol, inTpl: => DocTemplateImpl, newName: String): ValueParam =
new ParameterImpl(aSym, inTpl) with ValueParam {
override val name = newName
def isTypeParam = false
def isValueParam = true
- def resultType = {
+ def defaultValue =
+ if (aSym.hasDefault)
+ // units.filter should return only one element
+ (currentRun.units filter (_.source.file == aSym.sourceFile)).toList match {
+ case List(unit) =>
+ (unit.body find (_.symbol == aSym)) match {
+ case Some(ValDef(_,_,_,rhs)) => Some(rhs.toString)
+ case _ => None
+ }
+ case _ => None
+ }
+ else None
+ def resultType =
makeType(sym.tpe, inTpl, sym)
- }
+ def isImplicit = aSym.isImplicit
}
- }
/** */
def makeType(aType: Type, seeInTpl: => TemplateImpl, dclSym: Symbol): TypeEntity = {
@@ -452,7 +455,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { extractor =
nameBuffer append ')'
case TypeRef(pre, aSym, targs) =>
val bSym = normalizeTemplate(aSym)
- if (bSym.isTypeMember)
+ if (bSym.isNonClassType)
nameBuffer append bSym.name
else {
val tpl = makeTemplate(bSym)
@@ -489,4 +492,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { extractor =
val name = optimize(nameBuffer.toString)
}
+ def templateShouldDocument(aSym: Symbol): Boolean = {
+ (aSym.isPackageClass || (aSym.sourceFile != null)) && localShouldDocument(aSym) && ( aSym.owner == NoSymbol || templateShouldDocument(aSym.owner) )
+ }
+
+ def localShouldDocument(aSym: Symbol): Boolean =
+ !aSym.isPrivate && (aSym.isProtected || aSym.privateWithin == NoSymbol) && !aSym.isSynthetic
}
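// A minimal illustrative sketch (not from the patch): the two documentability predicates
// above, expressed over a made-up simplified Sym model instead of compiler Symbols.
object DocFilterSketch {
  // Only the fields the predicates actually look at.
  case class Sym(isPackageClass: Boolean, hasSource: Boolean, isPrivate: Boolean,
                 isProtected: Boolean, hasPrivateWithin: Boolean, isSynthetic: Boolean,
                 owner: Option[Sym])

  // Mirrors localShouldDocument: visible and not compiler-generated.
  def localShouldDocument(s: Sym): Boolean =
    !s.isPrivate && (s.isProtected || !s.hasPrivateWithin) && !s.isSynthetic

  // Mirrors templateShouldDocument: comes from source (or is a package), is itself
  // documentable, and every enclosing owner is documentable too.
  def templateShouldDocument(s: Sym): Boolean =
    (s.isPackageClass || s.hasSource) && localShouldDocument(s) &&
      s.owner.forall(templateShouldDocument)

  def main(args: Array[String]): Unit = {
    val pkg = Sym(isPackageClass = true, hasSource = false, isPrivate = false,
                  isProtected = false, hasPrivateWithin = false, isSynthetic = false, owner = None)
    val cls = pkg.copy(isPackageClass = false, hasSource = true, owner = Some(pkg))
    println(templateShouldDocument(cls)) // true: public, from source, inside a documentable package
  }
}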
diff --git a/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala b/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala
index 681ef4e02a..989dfa048e 100644
--- a/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala
@@ -11,7 +11,7 @@ import scala.collection._
abstract class TypeEntity {
- /** A string reprsentation of this type. */
+ /** A string representation of this type. */
def name: String
/** Maps which parts of this type's name reference other entities. The map is indexed by the position of the first
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala b/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala
index 28359104e0..7d2aee2d98 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/Body.scala
@@ -19,7 +19,7 @@ final case class Title(text: Inline, level: Int) extends Block
final case class Paragraph(text: Inline) extends Block
final case class Code(data: String) extends Block
final case class UnorderedList(items: Seq[Block]) extends Block
-final case class OrderedList(items: Seq[Block]) extends Block
+final case class OrderedList(items: Seq[Block], style: String) extends Block
final case class DefinitionList(items: SortedMap[Inline, Block]) extends Block
final case class HorizontalRule() extends Block
@@ -32,6 +32,7 @@ final case class Bold(text: Inline) extends Inline
final case class Underline(text: Inline) extends Inline
final case class Superscript(text: Inline) extends Inline
final case class Subscript(text: Inline) extends Inline
-final case class Link(raw: String) extends Inline // TODO
+final case class Link(target: String, title: Inline) extends Inline
+final case class EntityLink(target: TemplateEntity) extends Inline
final case class Monospace(text: String) extends Inline
final case class Text(text: String) extends Inline
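// A minimal illustrative sketch (not from the patch): building a tiny comment body with the
// revised node shapes — OrderedList now records its numbering style and Link carries an
// explicit title. The classes below are simplified stand-ins for the real Block/Inline types.
object BodySketch {
  sealed trait Inline
  final case class Text(text: String) extends Inline
  final case class Link(target: String, title: Inline) extends Inline

  sealed trait Block
  final case class Paragraph(text: Inline) extends Block
  final case class OrderedList(items: Seq[Block], style: String) extends Block

  def main(args: Array[String]): Unit = {
    val blocks: List[Block] = List(
      Paragraph(Link("http://www.scala-lang.org", Text("Scala"))),
      OrderedList(List(Paragraph(Text("first")), Paragraph(Text("second"))), "decimal")
    )
    blocks foreach println
  }
}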
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
index 97ee9abdf0..7fe2e58991 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/Comment.scala
@@ -9,52 +9,64 @@ import scala.collection._
/** A Scaladoc comment and all its tags.
*
- * '''Note:''' the only instantiation site of this class is in `Parser`.
+ * '''Note:''' the only instantiation site of this class is in [[CommentFactory]].
*
* @author Gilles Dubochet
* @author Manohar Jonnalagedda */
abstract class Comment {
- /** */
+ /** The main body of the comment that describes what the entity does and is. */
def body: Body
- /* author|deprecated|param|return|see|since|throws|version|todo|tparam */
+ /** A shorter version of the body. Usually, this is the first sentence of the body. */
def short: Inline
- /** */
+ /** A list of authors. The empty list is used when no author is defined. */
def authors: List[Body]
- /** */
+ /** A list of other resources to see, including links to other entities or to external documentation. The empty list
+ * is used when no other resource is mentioned. */
def see: List[Body]
- /** */
+ /** A description of the result of the entity. Typically, this provides additional information on the domain of the
+ * result, contractual post-conditions, etc. */
def result: Option[Body]
- /** */
+ /** A map of exceptions that the entity can throw when accessed, and a description of what they mean. */
def throws: Map[String, Body]
- /** */
+ /** A map of value parameters, and a description of what they are. Typically, this provides additional information on
+ * the domain of the parameters, contractual pre-conditions, etc. */
def valueParams: Map[String, Body]
- /** */
+ /** A map of type parameters, and a description of what they are. Typically, this provides additional information on
+ * the domain of the parameters. */
def typeParams: Map[String, Body]
- /** */
+ /** The version number of the entity. There is no formatting or further meaning attached to this value. */
def version: Option[Body]
- /** */
+ /** The version number of the containing entity in which this member-entity was introduced. */
def since: Option[Body]
- /** */
+ /** An annotation as to expected changes on this entity. */
def todo: List[Body]
- /** */
+ /** Whether the entity is deprecated. Using the "@deprecated" Scala attribute is preferable to using this Scaladoc
+ * tag. */
def deprecated: Option[Body]
+ /** An additional note concerning the contract of the entity. */
+ def note: List[Body]
+
+ /** A usage example related to the entity. */
+ def example: List[Body]
+
override def toString =
body.toString + "\n" +
(authors map ("@author " + _.toString)).mkString("\n") +
- (result map ("@return " + _.toString)).mkString
+ (result map ("@return " + _.toString)).mkString("\n") +
+ (version map ("@version " + _.toString)).mkString
}
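// A minimal illustrative sketch (not from the patch): a doc comment exercising the tags
// documented above, including the new @note and @example tags; Foo and bar are made-up names.
object Foo {
  /** Returns the length of the given string.
   *
   *  @param s the string to measure
   *  @return the number of characters in `s`
   *  @note `null` is not accepted
   *  @example {{{ Foo.bar("abc") == 3 }}}
   *  @since 2.8.0
   */
  def bar(s: String): Int = s.length
}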
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
index 4504a97af5..1a0bf37a84 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
@@ -19,28 +19,42 @@ import scala.annotation.switch
*
* @author Manohar Jonnalagedda
* @author Gilles Dubochet */
-final class CommentFactory(val reporter: Reporter) { parser =>
+trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
- final val endOfText = '\u0003'
- final val endOfLine = '\u000A'
+ val global: Global
+ import global.reporter
- /** Something that should not have happened, happened, and Scaladoc should exit. */
- protected def oops(msg: String): Nothing =
- throw FatalError("program logic: " + msg)
+ private val commentCache = mutable.HashMap.empty[(global.Symbol, TemplateImpl), Comment]
- protected val CleanHtml =
- new Regex("""</?(p|h\d|pre|dl|dt|dd|ol|ul|li|blockquote|div|hr|br|br)\s*/?>""")
+ def addCommentBody(sym: global.Symbol, inTpl: => TemplateImpl, docStr: String, docPos: global.Position): global.Symbol = {
+ commentCache += (sym, inTpl) -> parse(docStr, docPos)
+ sym
+ }
- protected val ShortLineEnd =
- new Regex("""\.|</(p|h\d|pre|dd|li|div|blockquote)>|<(hr|table)\s*/?>""")
+ def comment(sym: global.Symbol, inTpl: => DocTemplateImpl): Option[Comment] = {
+ val key = (sym, inTpl)
+ if (commentCache isDefinedAt key)
+ Some(commentCache(key))
+ else { // not reached for use-case comments
+ val rawComment = global.expandedDocComment(sym, inTpl.sym).trim
+ if (rawComment == "") None else {
+ val c = parse(rawComment, global.docCommentPos(sym))
+ commentCache += (sym, inTpl) -> c
+ Some(c)
+ }
+ }
+ }
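// A minimal illustrative sketch (not from the patch): the caching pattern used by `comment`
// above, reduced to a plain memoized lookup; parseComment is a placeholder for the real parser.
import scala.collection.mutable

object CommentCacheSketch {
  private val cache = mutable.HashMap.empty[String, String]

  private def parseComment(raw: String): String = raw.trim.toUpperCase // placeholder "parse"

  def comment(key: String, raw: String): Option[String] =
    cache.get(key) orElse {
      if (raw.trim.isEmpty) None
      else {
        val c = parseComment(raw)
        cache += key -> c // remember the parsed result for later lookups
        Some(c)
      }
    }

  def main(args: Array[String]): Unit = {
    println(comment("sym1", " hello "))  // parses and caches: Some(HELLO)
    println(comment("sym1", "ignored"))  // served from the cache: Some(HELLO)
  }
}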
- /** The body of a comment, dropping start and end markers. */
- protected val CleanComment =
- new Regex("""(?s)\s*/\*\*((?:[^\*]\*)*)\*/\s*""")
+ protected val endOfText = '\u0003'
+ protected val endOfLine = '\u000A'
- /** The body of a line, dropping the start star-marker, one leading whitespace and all trailing whitespace. */
+ /** Something that should not have happened, happened, and Scaladoc should exit. */
+ protected def oops(msg: String): Nothing =
+ throw FatalError("program logic: " + msg)
+
+ /** The body of a line, dropping the (optional) start star-marker, one leading whitespace and all trailing whitespace. */
protected val CleanCommentLine =
- new Regex("""\*\s?(.*)""")
+ new Regex("""(?:\s*\*\s?)?(.*)""")
/** A Scaladoc tag not linked to a symbol. Returns the name of the tag, and the rest of the line. */
protected val SimpleTag =
@@ -51,6 +65,14 @@ final class CommentFactory(val reporter: Reporter) { parser =>
protected val SymbolTag =
new Regex("""\s*@(param|tparam|throws)\s+(\S*)\s*(.*)""")
+ /** The start of a scaladoc code block */
+ protected val CodeBlockStart =
+ new Regex("""(.*)\{\{\{(.*)""")
+
+ /** The end of a scaladoc code block */
+ protected val CodeBlockEnd =
+ new Regex("""(.*)\}\}\}(.*)""")
+
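// A minimal illustrative sketch (not from the patch): how lines are split around Scaladoc
// code-block markers with regexes of the shape introduced above.
object CodeBlockRegexSketch {
  val CodeBlockStart = """(.*)\{\{\{(.*)""".r
  val CodeBlockEnd   = """(.*)\}\}\}(.*)""".r

  def main(args: Array[String]): Unit = {
    "Some text {{{ val x = 1" match {
      case CodeBlockStart(before, after) => println("before=[" + before + "] after=[" + after + "]")
      case _                             => println("no code block start")
    }
    "val y = 2 }}} more text" match {
      case CodeBlockEnd(before, after)   => println("before=[" + before + "] after=[" + after + "]")
      case _                             => println("no code block end")
    }
  }
}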
/** A key used for a tag map. The key is built from the name of the tag and from the linked symbol if the tag has one.
* Equality on tag keys is structural. */
protected sealed abstract class TagKey {
@@ -63,137 +85,158 @@ final class CommentFactory(val reporter: Reporter) { parser =>
/** Parses a raw comment string into a `Comment` object.
* @param comment The raw comment string (including start and end markers) to be parsed.
* @param pos The position of the comment in source. */
- def parse(comment: String, pos: Position): Comment = {
+ protected def parse(comment: String, pos: Position): Comment = {
/** The cleaned raw comment as a list of lines. Cleaning removes comment start and end markers, line start markers
* and unnecessary whitespace. */
val cleaned: List[String] = {
- def cleanLine(line: String): Option[String] = {
- line.trim match {
- case CleanCommentLine(ctl) => Some(ctl)
- case "" =>
- None
+ def cleanLine(line: String): String = {
+ // replaceAll removes trailing whitespace
+ line.replaceAll("""\s+$""", "") match {
+ case "" => "" // Empty lines are required to keep paragraphs
+ case CleanCommentLine(ctl) => ctl
case tl =>
- reporter.warning(pos, "Comment has no start-of-line marker ('*')")
- Some(tl)
+ reporter.warning(pos, "Please re-check this line of the comment")
+ tl
+ }
}
- }
- comment.trim.stripPrefix("/*").stripSuffix("*/").lines.toList flatMap (cleanLine(_))
- }
+ comment.trim.stripPrefix("/*").stripSuffix("*/").lines.toList map (cleanLine(_))
+ }
+
+ /** Parses a comment (in the form of a list of lines) to a Comment instance, recursively on lines. To do so, it
+ * splits the whole comment into main body and tag bodies, then runs the `WikiParser` on each body before creating
+ * the comment instance.
+ *
+ * @param docBody The body of the comment parsed until now.
+ * @param tags All tags parsed until now.
+ * @param lastTagKey The last parsed tag, or `None` if the tag section hasn't started. Lines that are not tagged
+ * are part of the previous tag or, if none exists, of the body.
+ * @param remaining The lines that must still recursively be parsed.
+ * @param inCodeBlock Whether the next line is part of a code block (in which tags are not parsed). */
+ def parse0(docBody: String, tags: Map[TagKey, List[String]], lastTagKey: Option[TagKey], remaining: List[String], inCodeBlock: Boolean): Comment = {
+ remaining match {
+
+ case CodeBlockStart(before, after) :: ls if (!inCodeBlock) =>
+ if (before.trim != "")
+ parse0(docBody, tags, lastTagKey, before :: ("{{{" + after) :: ls, false)
+ else if (after.trim != "")
+ parse0(docBody, tags, lastTagKey, after :: ls, true)
+ else
+ parse0(docBody, tags, lastTagKey, ls, true)
+
+ case CodeBlockEnd(before, after) :: ls =>
+ if (before.trim != "")
+ parse0(docBody, tags, lastTagKey, before :: ("}}}" + after) :: ls, true)
+ else if (after.trim != "")
+ parse0(docBody, tags, lastTagKey, after :: ls, false)
+ else
+ parse0(docBody, tags, lastTagKey, ls, false)
+
+ case SymbolTag(name, sym, body) :: ls if (!inCodeBlock) =>
+ val key = SymbolTagKey(name, sym)
+ val value = body :: tags.getOrElse(key, Nil)
+ parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+
+ case SimpleTag(name, body) :: ls if (!inCodeBlock) =>
+ val key = SimpleTagKey(name)
+ val value = body :: tags.getOrElse(key, Nil)
+ parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+
+ case line :: ls if (lastTagKey.isDefined) =>
+ val key = lastTagKey.get
+ val value =
+ ((tags get key): @unchecked) match {
+ case Some(b :: bs) => (b + endOfLine + line) :: bs
+ case None => oops("lastTagKey set when no tag exists for key")
+ }
+ parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock)
- /** Parses a comment (in the form of a list of lines) to a Comment instance, recursively on lines. To do so, it
- * splits the whole comment into main body and tag bodies, then runs the `WikiParser` on each body before creating
- * the comment instance.
- *
- * @param body The body of the comment parsed until now.
- * @param tags All tags parsed until now.
- * @param lastTagKey The last parsed tag, or `None` if the tag section hasn't started. Lines that are not tagged
- * are part of the previous tag or, if none exists, of the body.
- * @param remaining The lines that must still recursively be parsed. */
- def parse0(docBody: String, tags: Map[TagKey, List[String]], lastTagKey: Option[TagKey], remaining: List[String]): Comment =
- remaining match {
-
- case SymbolTag(name, sym, body) :: ls =>
- val key = SymbolTagKey(name, sym)
- val value = body :: tags.getOrElse(key, Nil)
- parse0(docBody, tags + (key -> value), Some(key), ls)
-
- case SimpleTag(name, body) :: ls =>
- val key = SimpleTagKey(name)
- val value = body :: tags.getOrElse(key, Nil)
- parse0(docBody, tags + (key -> value), Some(key), ls)
-
- case line :: ls if (lastTagKey.isDefined) =>
- val key = lastTagKey.get
- val value =
- ((tags get key): @unchecked) match {
- case Some(b :: bs) => (b + endOfLine + line) :: bs
- case None => oops("lastTagKey set when no tag exists for key")
- }
- parse0(docBody, tags + (key -> value), lastTagKey, ls)
+ case line :: ls =>
+ val newBody = if (docBody == "") line else docBody + endOfLine + line
+ parse0(newBody, tags, lastTagKey, ls, inCodeBlock)
- case line :: ls =>
- val newBody =
- if (docBody == "") line else docBody + endOfLine + line
- parse0(newBody, tags, lastTagKey, ls)
+ case Nil =>
- case Nil =>
+ val bodyTags: mutable.Map[TagKey, List[Body]] =
+ mutable.Map(tags mapValues (_ map (parseWiki(_, pos))) toSeq: _*)
- val bodyTags: mutable.Map[TagKey, List[Body]] =
- mutable.Map((tags map { case (key, values) => key -> (values map (parseWiki(_, pos))) }).toSeq:_*)
+ def oneTag(key: SimpleTagKey): Option[Body] =
+ ((bodyTags remove key): @unchecked) match {
+ case Some(r :: rs) =>
+ if (!rs.isEmpty) reporter.warning(pos, "Only one '@" + key.name + "' tag is allowed")
+ Some(r)
+ case None => None
+ }
- def oneTag(key: SimpleTagKey): Option[Body] =
- ((bodyTags remove key): @unchecked) match {
- case Some(r :: rs) =>
- if (!rs.isEmpty) reporter.warning(pos, "Only one '@" + key.name + "' tag is allowed")
- Some(r)
- case None => None
+ def allTags(key: SimpleTagKey): List[Body] =
+ (bodyTags remove key) getOrElse Nil
+
+ def allSymsOneTag(key: TagKey): Map[String, Body] = {
+ val keys: Seq[SymbolTagKey] =
+ bodyTags.keys.toSeq flatMap {
+ case stk: SymbolTagKey if (stk.name == key.name) => Some(stk)
+ case stk: SimpleTagKey if (stk.name == key.name) =>
+ reporter.warning(pos, "Tag '@" + stk.name + "' must be followed by a symbol name")
+ None
+ case _ => None
+ }
+ val pairs: Seq[(String, Body)] =
+ for (key <- keys) yield {
+ val bs = (bodyTags remove key).get
+ if (bs.length > 1)
+ reporter.warning(pos, "Only one '@" + key.name + "' tag for symbol " + key.symbol + " is allowed")
+ (key.symbol, bs.head)
+ }
+ Map.empty[String, Body] ++ pairs
}
- def allTags(key: SimpleTagKey): List[Body] =
- (bodyTags remove key) getOrElse Nil
-
- def allSymsOneTag(key: TagKey): Map[String, Body] = {
- val keys: Seq[SymbolTagKey] =
- bodyTags.keysIterator.toSeq flatMap {
- case stk: SymbolTagKey if (stk.name == key.name) => Some(stk)
- case stk: SimpleTagKey if (stk.name == key.name) =>
- reporter.warning(pos, "Tag '@" + stk.name + "' must be followed by a symbol name")
- None
- case _ => None
- }
- val pairs: Seq[(String, Body)] =
- for (key <- keys) yield {
- val bs = (bodyTags remove key).get
- if (bs.length > 1)
- reporter.warning(pos, "Only one '@" + key.name + "' tag for symbol " + key.symbol + " is allowed")
- (key.symbol, bs.head)
- }
- Map.empty[String, Body] ++ pairs
- }
-
- val com = new Comment {
- val body = parseWiki(docBody, pos)
- val authors = allTags(SimpleTagKey("author"))
- val see = allTags(SimpleTagKey("see"))
- val result = oneTag(SimpleTagKey("return"))
- val throws = allSymsOneTag(SimpleTagKey("throws"))
- val valueParams = allSymsOneTag(SimpleTagKey("param"))
- val typeParams = allSymsOneTag(SimpleTagKey("tparam"))
- val version = oneTag(SimpleTagKey("version"))
- val since = oneTag(SimpleTagKey("since"))
- val todo = allTags(SimpleTagKey("todo"))
- val deprecated = oneTag(SimpleTagKey("deprecated"))
- val short = {
- val shortText = ShortLineEnd.findFirstMatchIn(docBody) match {
- case None => docBody
- case Some(m) => docBody.take(m.start)
- }
- val safeText = CleanHtml.replaceAllIn(shortText, "") // get rid of all layout-busting html tags
- parseWiki(safeText, pos) match {
- case Body(Paragraph(inl) :: _) => inl
- case _ =>
- reporter.warning(pos, "Comment must start with a sentence")
- Text("")
+ val com = new Comment {
+ val body = parseWiki(docBody, pos)
+ val authors = allTags(SimpleTagKey("author"))
+ val see = allTags(SimpleTagKey("see"))
+ val result = oneTag(SimpleTagKey("return"))
+ val throws = allSymsOneTag(SimpleTagKey("throws"))
+ val valueParams = allSymsOneTag(SimpleTagKey("param"))
+ val typeParams = allSymsOneTag(SimpleTagKey("tparam"))
+ val version = oneTag(SimpleTagKey("version"))
+ val since = oneTag(SimpleTagKey("since"))
+ val todo = allTags(SimpleTagKey("todo"))
+ val deprecated = oneTag(SimpleTagKey("deprecated"))
+ val note = allTags(SimpleTagKey("note"))
+ val example = allTags(SimpleTagKey("example"))
+ val short = {
+ def findShort(blocks: Iterable[Block]): Inline =
+ if (blocks.isEmpty) Text("")
+ else blocks.head match {
+ case Title(text, _) => text
+ case Paragraph(text) => text
+ case Code(data) => Monospace(data.lines.next)
+ case UnorderedList(items) => findShort(items)
+ case OrderedList(items, _) => findShort(items)
+ case DefinitionList(items) => findShort(items.values)
+ case HorizontalRule() => findShort(blocks.tail)
+ }
+ findShort(body.blocks)
}
}
- }
- for ((key, _) <- bodyTags)
- reporter.warning(pos, "Tag '@" + key.name + "' is not recognised")
+ for ((key, _) <- bodyTags)
+ reporter.warning(pos, "Tag '@" + key.name + "' is not recognised")
- com
+ com
+ }
}
- parse0("", Map.empty, None, cleaned)
+ parse0("", Map.empty, None, cleaned, false)
+
}
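// A minimal illustrative sketch (not from the patch): the body/tag split performed by parse0
// above, reduced to plain string handling with one simplified tag regex, no code-block
// handling and no per-symbol tag keys.
object TagSplitSketch {
  val SimpleTag = """\s*@(\w+)\s+(.*)""".r

  def split(lines: List[String]): (String, Map[String, List[String]]) = {
    def loop(body: List[String], tags: Map[String, List[String]],
             last: Option[String], rest: List[String]): (String, Map[String, List[String]]) =
      rest match {
        case SimpleTag(name, text) :: ls =>          // a new tag starts here
          loop(body, tags + (name -> (tags.getOrElse(name, Nil) :+ text)), Some(name), ls)
        case line :: ls if last.isDefined =>         // continuation of the previous tag
          val name     = last.get
          val appended = tags(name).init :+ (tags(name).last + "\n" + line)
          loop(body, tags + (name -> appended), last, ls)
        case line :: ls =>                           // still in the main body
          loop(body :+ line, tags, last, ls)
        case Nil =>
          (body.mkString("\n"), tags)
      }
    loop(Nil, Map.empty, None, lines)
  }

  def main(args: Array[String]): Unit = {
    val (body, tags) = split(List(
      "Adds two numbers.", "",
      "@param x the first operand", "@param y the second operand", "@return their sum"))
    println(body)
    println(tags) // Map(param -> List(x the first operand, y the second operand), return -> List(their sum))
  }
}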
/** Parses a string containing wiki syntax into a `Comment` object. Note that the string is assumed to be clean:
- * * Removed Scaladoc start and end markers.
- * * Removed start-of-line star and one whitespace afterwards (if present).
- * * Removed all end-of-line whitespace.
- * * Only `endOfLine` is used to mark line endings. */
- protected def parseWiki(string: String, pos: Position): Body =
+ * - Removed Scaladoc start and end markers.
+ * - Removed start-of-line star and one whitespace afterwards (if present).
+ * - Removed all end-of-line whitespace.
+ * - Only `endOfLine` is used to mark line endings. */
+ def parseWiki(string: String, pos: Position): Body =
new WikiParser(string.toArray, pos).document()
/** TODO
@@ -203,6 +246,17 @@ final class CommentFactory(val reporter: Reporter) { parser =>
* @author Gilles Dubochet */
protected final class WikiParser(val buffer: Array[Char], pos: Position) extends CharReader(buffer) { wiki =>
+ /** listStyle ::= '-' spc | '1.' spc | 'I.' spc | 'i.' spc | 'A.' spc | 'a.' spc
+ * The markers used to introduce list items, and the block constructors they select */
+ protected val listStyles = Map[String, (Seq[Block] => Block)]( // TODO Should this be defined at some list companion?
+ "- " -> ( UnorderedList(_) ),
+ "1. " -> ( OrderedList(_,"decimal") ),
+ "I. " -> ( OrderedList(_,"upperRoman") ),
+ "i. " -> ( OrderedList(_,"lowerRoman") ),
+ "A. " -> ( OrderedList(_,"upperAlpha") ),
+ "a. " -> ( OrderedList(_,"lowerAlpha") )
+ )
+
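// A minimal illustrative sketch (not from the patch): how a leading list marker selects a
// block constructor, mirroring the listStyles table above with simplified node types.
object ListStyleSketch {
  sealed trait Block
  case class Item(text: String) extends Block
  case class UnorderedList(items: Seq[Block]) extends Block
  case class OrderedList(items: Seq[Block], style: String) extends Block

  val listStyles = Map[String, Seq[Block] => Block](
    "- "  -> (UnorderedList(_)),
    "1. " -> (OrderedList(_, "decimal")),
    "I. " -> (OrderedList(_, "upperRoman")),
    "a. " -> (OrderedList(_, "lowerAlpha"))
  )

  def main(args: Array[String]): Unit = {
    val lines  = List("1. first", "1. second")
    val marker = listStyles.keys.find(m => lines.head startsWith m).getOrElse("- ")
    val items  = lines map (l => Item(l stripPrefix marker))
    println(listStyles(marker)(items)) // OrderedList(List(Item(first), Item(second)),decimal)
  }
}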
def document(): Body = {
nextChar()
val blocks = new mutable.ListBuffer[Block]
@@ -221,18 +275,64 @@ final class CommentFactory(val reporter: Reporter) { parser =>
title()
else if (check("----"))
hrule()
- // TODO: Lists
+ else if (checkList)
+ listBlock
else {
para()
}
}
- /** {{{ code ::= "{{{" { char } '}' "}}" '\n' }}} */
+ /** Checks whether the current line starts with at least one space followed by one of the listStyles */
+ def checkList =
+ countWhitespace > 0 && listStyles.keysIterator.indexWhere(checkSkipWhitespace(_)) >= 0
+
+ /** {{{
+ * nListBlock ::= nLine { mListBlock }
+ * nLine ::= nSpc listStyle para '\n'
+ * }}}
+ * Where n and m stand for the number of spaces. When m > n, a new list is nested. */
+ def listBlock: Block = {
+ /** consumes one line of a list block */
+ def listLine(indentedListStyle: String): Block = {
+ // deals with mixed list styles at the same nesting level by skipping the marker
+ if(!jump(indentedListStyle)) { // TODO show warning when jump is false
+ nextChar();
+ nextChar()
+ }
+ val p = Paragraph(inline(check(Array(endOfLine))))
+ blockEnded("end of list line ")
+ p
+ }
+ def listLevel(leftSide: String, listStyle: String, constructor: (Seq[Block] => Block)): Block = {
+ val blocks = mutable.ListBuffer.empty[Block]
+ val length = leftSide.length
+ val indentedListStyle = leftSide + listStyle
+
+ var index = 1
+ var line = listLine(indentedListStyle)
+
+ while (index > -1) {
+ blocks += line
+ if (countWhitespace > length) { // nesting-in
+ blocks += listBlock // TODO is tailrec really needed here?
+ }
+ index = listStyles.keysIterator.indexWhere(x => check(leftSide))
+ if (index > -1) { line = listLine(indentedListStyle) }
+ }
+
+ constructor(blocks)
+ }
+ val indentation = countWhitespace
+ val indentStr = " " * indentation
+ val style = listStyles.keysIterator.find( x => check(indentStr + x) ).getOrElse(listStyles.keysIterator.next)
+ val constructor = listStyles(style)
+ listLevel(indentStr, style, constructor)
+ }
def code(): Block = {
jump("{{{")
readUntil("}}}")
if (char == endOfText)
- reporter.warning(pos, "unclosed code block")
+ reportError(pos, "unclosed code block")
else
jump("}}}")
blockEnded("code block")
@@ -245,7 +345,7 @@ final class CommentFactory(val reporter: Reporter) { parser =>
val text = inline(check(Array.fill(inLevel)('=')))
val outLevel = repeatJump("=", inLevel)
if (inLevel != outLevel)
- reporter.warning(pos, "unbalanced or unclosed heading")
+ reportError(pos, "unbalanced or unclosed heading")
blockEnded("heading")
Title(text, inLevel)
}
@@ -259,12 +359,7 @@ final class CommentFactory(val reporter: Reporter) { parser =>
/** {{{ para ::= inline '\n' }}} */
def para(): Block = {
- def checkParaEnd(): Boolean = {
- check(Array(endOfLine, endOfLine)) ||
- check(Array(endOfLine, '='))
- check(Array(endOfLine, '{', '{', '{'))
- }
- val p = Paragraph(inline(checkParaEnd()))
+ val p = Paragraph(inline(checkParaEnded()))
while (char == endOfLine && char != endOfText)
nextChar()
p
@@ -359,11 +454,39 @@ final class CommentFactory(val reporter: Reporter) { parser =>
Subscript(i)
}
+ protected val SchemeUri =
+ new Regex("""([^:]+:.*)""")
+
+ def entityLink(query: String): Inline = findTemplate(query) match {
+ case Some(tpl) =>
+ EntityLink(tpl)
+ case None =>
+ Text(query)
+ }
+
def link(isInlineEnd: => Boolean, isBlockEnd: => Boolean): Inline = {
jump("[[")
- readUntil { check("]]") }
+ readUntil { check("]]") || check(" ") }
+ val target = getRead()
+ val title =
+ if (!check("]]")) Some({
+ jump(" ")
+ inline(check("]]"), isBlockEnd)
+ })
+ else None
jump("]]")
- Link(getRead())
+ (target, title) match {
+ case (SchemeUri(uri), Some(title)) =>
+ Link(uri, title)
+ case (SchemeUri(uri), None) =>
+ Link(uri, Text(uri))
+ case (qualName, None) =>
+ entityLink(qualName)
+ case (qualName, Some(text)) =>
+ reportError(pos, "entity link to " + qualName + " cannot have a custom title '" + text + "'")
+ entityLink(qualName)
+ }
+
}
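// A minimal illustrative sketch (not from the patch): how a wiki link target is classified —
// anything with a URI scheme becomes an external Link, everything else is treated as an
// entity reference. The regex mirrors SchemeUri above; the node types are simplified stand-ins.
object LinkSketch {
  sealed trait Inline
  case class Text(text: String) extends Inline
  case class Link(target: String, title: Inline) extends Inline
  case class EntityRef(qualifiedName: String) extends Inline

  val SchemeUri = """([^:]+:.*)""".r

  def classify(target: String, title: Option[String]): Inline = (target, title) match {
    case (SchemeUri(uri), Some(t)) => Link(uri, Text(t))
    case (SchemeUri(uri), None)    => Link(uri, Text(uri))
    case (qualName, _)             => EntityRef(qualName)
  }

  def main(args: Array[String]): Unit = {
    println(classify("http://www.scala-lang.org", Some("Scala"))) // external link with a title
    println(classify("scala.collection.Seq", None))               // looked up as an entity
  }
}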
/* UTILITY */
@@ -371,7 +494,7 @@ final class CommentFactory(val reporter: Reporter) { parser =>
/** {{{ eol ::= { whitespace } '\n' }}} */
def blockEnded(blockType: String): Unit = {
if (char != endOfLine && char != endOfText) {
- reporter.warning(pos, "no additional content on same line after " + blockType)
+ reportError(pos, "no additional content on same line after " + blockType)
jumpUntil(endOfLine)
}
while (char == endOfLine)
@@ -379,9 +502,19 @@ final class CommentFactory(val reporter: Reporter) { parser =>
}
def checkParaEnded(): Boolean = {
- char == endOfText || check(Array(endOfLine, endOfLine)) || check(Array(endOfLine, '{', '{', '{')) || check(Array(endOfLine, '\u003D'))
+ (char == endOfText) ||
+ ((char == endOfLine) && {
+ check(Array(endOfLine, endOfLine)) ||
+ check(Array(endOfLine, '=')) ||
+ check(Array(endOfLine, '{', '{', '{')) ||
+ check(Array(endOfLine, ' ', '-', ' ')) ||
+ check(Array(endOfLine, '\u003D'))
+ })
}
+ def reportError(pos: Position, message: String): Unit =
+ reporter.warning(pos, message)
+
}
protected sealed class CharReader(buffer: Array[Char]) { reader =>
@@ -416,8 +549,36 @@ final class CommentFactory(val reporter: Reporter) { parser =>
ok
}
+ def checkSkipWhitespace(chars: Array[Char]): Boolean = {
+ assert(chars.head != ' ') // a leading space would make the whitespace-skipping check meaningless
+ val poff = offset
+ val pc = char
+ jumpWhitespace
+ val ok = jump(chars)
+ offset = poff
+ char = pc
+ ok
+ }
+
+ def countWhitespace: Int = {
+ var count = 0
+ val poff = offset
+ val pc = char
+ while (isWhitespace(char) && char!=endOfText) {
+ nextChar()
+ count += 1
+ }
+ offset = poff
+ char = pc
+ count
+ }
+
/* JUMPERS */
+ /** Jumps over all the characters in `chars`, consuming any characters that match.
+ * @return true only if all the characters in `chars` have been jumped
+ */
final def jump(chars: Array[Char]): Boolean = {
var index = 0
while (index < chars.length && char == chars(index) && char != endOfText) {
@@ -444,7 +605,8 @@ final class CommentFactory(val reporter: Reporter) { parser =>
while (more && count < max) {
if (!checkedJump(chars))
more = false
- count += 1
+ else
+ count += 1
}
count
}
@@ -455,15 +617,18 @@ final class CommentFactory(val reporter: Reporter) { parser =>
while (more) {
if (!checkedJump(chars))
more = false
- count += 1
+ else
+ count += 1
}
count
}
final def jumpUntil(ch: Char): Int = {
var count = 0
- while(char != ch && char != endOfText)
+ while(char != ch && char != endOfText) {
nextChar()
+ count += 1
+ }
count
}
@@ -473,16 +638,20 @@ final class CommentFactory(val reporter: Reporter) { parser =>
val c = chars(0)
while(!check(chars) && char != endOfText) {
nextChar()
- while (char != c && char != endOfText)
+ while (char != c && char != endOfText) {
nextChar()
+ count += 1
+ }
}
count
}
final def jumpUntil(pred: => Boolean): Int = {
var count = 0
- while (!pred && char != endOfText)
+ while (!pred && char != endOfText) {
nextChar()
+ count += 1
+ }
count
}
diff --git a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
index 57a91bf332..302bba0e07 100644
--- a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
@@ -68,7 +68,8 @@ object BuildManagerTest extends EvalLoop {
}
val settings = new Settings(error)
- val command = new CompilerCommand(args.toList, settings, error, false)
+ settings.Ybuildmanagerdebug.value = true
+ val command = new CompilerCommand(args.toList, settings)
// settings.make.value = "off"
// val buildManager: BuildManager = new SimpleBuildManager(settings)
val buildManager: BuildManager = new RefinedBuildManager(settings)
@@ -78,7 +79,7 @@ object BuildManagerTest extends EvalLoop {
// enter resident mode
loop { line =>
val args = line.split(' ').toList
- val command = new CompilerCommand(args, new Settings(error), error, true)
+ val command = new CompilerCommand(args, settings)
buildManager.update(command.files, Set.empty)
}
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
index ab02ae7460..26f7fb1115 100644
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
@@ -2,7 +2,7 @@ package scala.tools.nsc
package interactive
import scala.concurrent.SyncVar
-import scala.util.control.ControlException
+import scala.util.control.ControlThrowable
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.util.{SourceFile, Position, WorkScheduler}
import scala.tools.nsc.symtab._
@@ -124,23 +124,19 @@ trait CompilerControl { self: Global =>
}
/** Cancel currently pending high-priority jobs */
- def askCancel() =
- scheduler.raise(new CancelActionReq)
+ def askCancel() = scheduler raise CancelActionReq
/** Cancel current compiler run and start a fresh one where everything will be re-typechecked
* (but not re-loaded).
*/
- def askReset() =
- scheduler.raise(new FreshRunReq)
+ def askReset() = scheduler raise FreshRunReq
/** Tell the compile server to shutdown, and do not restart again */
- def askShutdown() =
- scheduler.raise(new ShutdownReq)
+ def askShutdown() = scheduler raise ShutdownReq
- // ---------------- Interpreted exeptions -------------------
-
- class CancelActionReq extends Exception with ControlException
- class FreshRunReq extends Exception with ControlException
- class ShutdownReq extends Exception with ControlException
+ // ---------------- Interpreted exceptions -------------------
+ object CancelActionReq extends ControlThrowable
+ object FreshRunReq extends ControlThrowable
+ object ShutdownReq extends ControlThrowable
}
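// A minimal illustrative sketch (not from the patch): the control-flow pattern used above —
// request singletons extending ControlThrowable are thrown to interrupt work and matched by
// identity at the catch site, without the cost of building a stack trace.
import scala.util.control.ControlThrowable

object ControlReqSketch {
  object CancelReq extends ControlThrowable
  object ShutdownReq extends ControlThrowable

  def work(interrupt: () => Unit): String =
    try { interrupt(); "finished" }
    catch {
      case CancelReq   => "cancelled"
      case ShutdownReq => "shutting down"
    }

  def main(args: Array[String]): Unit = {
    println(work(() => ()))              // finished
    println(work(() => throw CancelReq)) // cancelled
  }
}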
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index 35d212071e..03fd92235d 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -5,7 +5,7 @@ import java.io.{ PrintWriter, StringWriter }
import scala.collection.mutable.{LinkedHashMap, SynchronizedMap}
import scala.concurrent.SyncVar
-import scala.util.control.ControlException
+import scala.util.control.ControlThrowable
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.util.{SourceFile, Position, RangePosition, OffsetPosition, NoPosition, WorkScheduler}
import scala.tools.nsc.reporters._
@@ -91,10 +91,10 @@ self =>
// it will still be null now?
if (context.unit != null)
integrateNew()
- throw new FreshRunReq
- } catch {
- case ex : ValidateError => // Ignore, this will have been reported elsewhere
- case t : Throwable => throw t
+ throw FreshRunReq
+ }
+ catch {
+ case ex : ValidateException => // Ignore, this will have been reported elsewhere
}
}
}
@@ -110,14 +110,14 @@ self =>
// ----------------- Polling ---------------------------------------
/** Called from runner thread and signalDone:
- * Poll for exeptions.
+ * Poll for exceptions.
* Poll for work reload/typedTreeAt/doFirst commands during background checking.
*/
def pollForWork() {
- scheduler.pollException() match {
- case Some(ex: CancelActionReq) => if (acting) throw ex
- case Some(ex: FreshRunReq) =>
- currentTyperRun = new TyperRun()
+ scheduler.pollThrowable() match {
+ case Some(ex @ CancelActionReq) => if (acting) throw ex
+ case Some(ex @ FreshRunReq) =>
+ currentTyperRun = newTyperRun
minRunId = currentRunId
if (outOfDate) throw ex
else outOfDate = true
@@ -132,7 +132,7 @@ self =>
action()
if (debugIDE) println("done with work item: "+action)
} catch {
- case ex: CancelActionReq =>
+ case CancelActionReq =>
if (debugIDE) println("cancelled work item: "+action)
} finally {
if (debugIDE) println("quitting work item: "+action)
@@ -150,7 +150,7 @@ self =>
val tree = locateTree(pos)
val sw = new StringWriter
val pw = new PrintWriter(sw)
- treePrinters.create(pw).print(tree)
+ newTreePrinter(pw).print(tree)
pw.flush
val typed = new Response[Tree]
@@ -159,7 +159,7 @@ self =>
case Some(tree) =>
val sw = new StringWriter
val pw = new PrintWriter(sw)
- treePrinters.create(pw).print(tree)
+ newTreePrinter(pw).print(tree)
pw.flush
sw.toString
case None => "<None>"
@@ -195,19 +195,19 @@ self =>
backgroundCompile()
outOfDate = false
} catch {
- case ex: FreshRunReq =>
+ case FreshRunReq =>
}
}
}
} catch {
- case ex: ShutdownReq =>
+ case ShutdownReq =>
;
case ex =>
outOfDate = false
compileRunner = newRunnerThread
ex match {
- case _ : FreshRunReq => // This shouldn't be reported
- case _ : ValidateError => // This will have been reported elsewhere
+ case FreshRunReq => // This shouldn't be reported
+ case _ : ValidateException => // This will have been reported elsewhere
case _ => ex.printStackTrace(); inform("Fatal Error: "+ex)
}
}
@@ -222,7 +222,7 @@ self =>
reporter.reset
firsts = firsts filter (s => unitOfFile contains (s.file))
val prefix = firsts map unitOf
- val units = prefix ::: (unitOfFile.valuesIterator.toList diff prefix) filter (!_.isUpToDate)
+ val units = prefix ::: (unitOfFile.values.toList diff prefix) filter (!_.isUpToDate)
recompile(units)
if (debugIDE) inform("Everything is now up to date")
}
@@ -269,14 +269,14 @@ self =>
firsts = fs ::: (firsts diff fs)
}
- // ----------------- Implementations of client commmands -----------------------
+ // ----------------- Implementations of client commands -----------------------
def respond[T](result: Response[T])(op: => T): Unit =
try {
result set Left(op)
return
} catch {
- case ex : FreshRunReq =>
+ case ex @ FreshRunReq =>
scheduler.postWorkItem(() => respond(result)(op))
throw ex
case ex =>
@@ -286,7 +286,7 @@ self =>
/** Make sure a set of compilation units is loaded and parsed */
def reloadSources(sources: List[SourceFile]) {
- currentTyperRun = new TyperRun()
+ currentTyperRun = newTyperRun
for (source <- sources) {
val unit = new RichCompilationUnit(source)
unitOfFile(source.file) = unit
@@ -298,7 +298,7 @@ self =>
/** Make sure a set of compilation units is loaded and parsed */
def reload(sources: List[SourceFile], result: Response[Unit]) {
respond(result)(reloadSources(sources))
- if (outOfDate) throw new FreshRunReq
+ if (outOfDate) throw FreshRunReq
else outOfDate = true
}
@@ -333,7 +333,7 @@ self =>
def stabilizedType(tree: Tree): Type = tree match {
case Ident(_) if tree.symbol.isStable => singleType(NoPrefix, tree.symbol)
- case Select(qual, _) if tree.symbol.isStable => singleType(qual.tpe, tree.symbol)
+ case Select(qual, _) if qual.tpe != null && tree.symbol.isStable => singleType(qual.tpe, tree.symbol)
case Import(expr, selectors) =>
tree.symbol.info match {
case analyzer.ImportType(expr) => expr match {
@@ -387,7 +387,7 @@ self =>
addScopeMember(sym, pre, imp.qual)
}
}
- val result = locals.valuesIterator.toList
+ val result = locals.values.toList
if (debugIDE) for (m <- result) println(m)
result
}
@@ -398,18 +398,24 @@ self =>
}
def typeMembers(pos: Position): List[TypeMember] = {
- val tree1 = typedTreeAt(pos)
- val tree0 = tree1 match {
- case tt : TypeTree => tt.original
- case t => t
+ var tree = typedTreeAt(pos)
+ tree match {
+ case tt : TypeTree => tree = tt.original
+ case _ =>
}
- val tree = tree0 match {
- case s@Select(qual, name) if s.tpe == ErrorType => qual
- case t => t
+
+ tree match {
+ case Select(qual, name) if tree.tpe == ErrorType => tree = qual
+ case _ =>
}
- println("typeMembers at "+tree+" "+tree.tpe)
val context = doLocateContext(pos)
+
+ if (tree.tpe == null)
+ tree = analyzer.newTyper(context).typedQualifier(tree)
+
+ println("typeMembers at "+tree+" "+tree.tpe)
+
val superAccess = tree.isInstanceOf[Super]
val scope = new Scope
val members = new LinkedHashMap[Symbol, TypeMember]
@@ -449,7 +455,7 @@ self =>
addTypeMember(sym, vpre, false, view.tree.symbol)
}
}
- members.valuesIterator.toList
+ members.values.toList
}
// ---------------- Helper classes ---------------------------
@@ -466,12 +472,20 @@ self =>
/** The typer run */
class TyperRun extends Run {
// units is always empty
- // symSource, symData are ignored
- override def compiles(sym: Symbol) = false
- def typeCheck(unit: CompilationUnit): Unit = applyPhase(typerPhase, unit)
+ /** canRedefine is used to detect double declarations in multiple source files.
+ * Since the IDE rechecks units several times in the same run, these tests
+ * are disabled by always returning true here.
+ */
+ override def canRedefine(sym: Symbol) = true
- def enterNames(unit: CompilationUnit): Unit = applyPhase(namerPhase, unit)
+ def typeCheck(unit: CompilationUnit): Unit = {
+ applyPhase(typerPhase, unit)
+ }
+
+ def enterNames(unit: CompilationUnit): Unit = {
+ applyPhase(namerPhase, unit)
+ }
/** Return fully attributed tree at given position
* (i.e. largest tree that's contained by position)
@@ -480,7 +494,7 @@ self =>
println("starting typedTreeAt")
val tree = locateTree(pos)
println("at pos "+pos+" was found: "+tree+tree.pos.show)
- if (tree.tpe ne null) {
+ if (stabilizedType(tree) ne null) {
println("already attributed")
tree
} else {
@@ -518,7 +532,9 @@ self =>
}
}
- class TyperResult(val tree: Tree) extends Exception with ControlException
+ def newTyperRun = new TyperRun
+
+ class TyperResult(val tree: Tree) extends ControlThrowable
assert(globalPhase.id == 0)
}
diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala
index f3b1900ef2..5589ddb9b1 100644
--- a/src/compiler/scala/tools/nsc/interactive/REPL.scala
+++ b/src/compiler/scala/tools/nsc/interactive/REPL.scala
@@ -28,7 +28,7 @@ object REPL {
def process(args: Array[String]) {
val settings = new Settings(error)
reporter = new ConsoleReporter(settings)
- val command = new CompilerCommand(args.toList, settings, error, false)
+ val command = new CompilerCommand(args.toList, settings)
if (command.settings.version.value)
reporter.info(null, versionMsg, true)
else {
diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
index 49fc8be185..337f306664 100644
--- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
@@ -103,7 +103,7 @@ self: scala.tools.nsc.Global =>
/** Ensure that given tree has no positions that overlap with
* any of the positions of `others`. This is done by
- * shortening the range or assinging TransparentPositions
+ * shortening the range or assigning TransparentPositions
* to some of the nodes in `tree`.
*/
override def ensureNonOverlapping(tree: Tree, others: List[Tree]) {
@@ -199,7 +199,7 @@ self: scala.tools.nsc.Global =>
inform(tree.toString)
inform("")
inform("=======")
- throw new ValidateError(msg)
+ throw new ValidateException(msg)
}
def validate(tree: Tree, encltree: Tree): Unit = {
@@ -238,7 +238,7 @@ self: scala.tools.nsc.Global =>
validate(tree, tree)
}
- class ValidateError(msg : String) extends Exception(msg)
+ class ValidateException(msg : String) extends Exception(msg)
// ---------------- Locating trees ----------------------------------
diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
index 9ca9a740df..3f59824ec2 100644
--- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
@@ -41,6 +41,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
val compiler = newCompiler(settings)
import compiler.{Symbol, Type, atPhase, currentRun}
+ import compiler.dependencyAnalysis.Inherited
private case class SymWithHistory(sym: Symbol, befErasure: Type)
@@ -55,6 +56,15 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
/** External references used by source file. */
private var references: mutable.Map[AbstractFile, immutable.Set[String]] = _
+ /** External references for inherited members */
+ private var inherited: mutable.Map[AbstractFile, immutable.Set[Inherited]] = _
+
+ /** Reverse of definitions, used for caching */
+ private var classes: mutable.Map[String, AbstractFile] =
+ new mutable.HashMap[String, AbstractFile] {
+ override def default(key: String) = null
+ }
+
/** Add the given source files to the managed build process. */
def addSourceFiles(files: Set[AbstractFile]) {
sources ++= files
@@ -73,7 +83,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
private def invalidatedByRemove(files: Set[AbstractFile]): Set[AbstractFile] = {
val changes = new mutable.HashMap[Symbol, List[Change]]
for (f <- files; SymWithHistory(sym, _) <- definitions(f))
- changes += sym -> List(Removed(Class(sym.fullNameString)))
+ changes += sym -> List(Removed(Class(sym.fullName)))
invalidated(files, changes)
}
@@ -92,26 +102,43 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
private def update(files: Set[AbstractFile]) = {
val coll: mutable.Map[AbstractFile, immutable.Set[AbstractFile]] =
mutable.HashMap[AbstractFile, immutable.Set[AbstractFile]]()
+ compiler.reporter.reset
- // See if we really have coresponding symbols, not just those
+ // See if we really have corresponding symbols, not just those
// which share the name
def isCorrespondingSym(from: Symbol, to: Symbol): Boolean =
(from.hasFlag(Flags.TRAIT) == to.hasFlag(Flags.TRAIT)) &&
(from.hasFlag(Flags.MODULE) == to.hasFlag(Flags.MODULE))
+ // For testing purposes only, order irrelevant for compilation
+ def toStringSet(set: Set[AbstractFile]): String = {
+ val s = set.toList sortBy (_.name)
+ s.mkString("Set(", ", ", ")")
+ }
+
def update0(files: Set[AbstractFile]): Unit = if (!files.isEmpty) {
deleteClassfiles(files)
val run = compiler.newRun()
- compiler.inform("compiling " + files)
+ if (settings.Ybuildmanagerdebug.value)
+ compiler.inform("compiling " + toStringSet(files))
buildingFiles(files)
run.compileFiles(files.toList)
if (compiler.reporter.hasErrors) {
- compiler.reporter.reset
return
}
- val changesOf = new mutable.HashMap[Symbol, List[Change]]
+ // Deterministic behaviour required by partest
+ val changesOf = new mutable.HashMap[Symbol, List[Change]] {
+ override def toString: String = {
+ val changesOrdered =
+ toList.map(e => {
+ e._1.toString + " -> " +
+ e._2.sortBy(_.toString).mkString("List(", ", ", ")")
+ })
+ changesOrdered.sorted.mkString("Map(", ", ", ")")
+ }
+ }
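// A minimal illustrative sketch (not from the patch): deterministic rendering of an unordered
// map, the same idea as the toString override above that keeps partest output stable.
import scala.collection.mutable

object StableToStringSketch {
  def render(m: mutable.Map[String, List[String]]): String = {
    val entries = m.toList map { case (k, vs) =>
      k + " -> " + vs.sorted.mkString("List(", ", ", ")")
    }
    entries.sorted.mkString("Map(", ", ", ")") // sort so iteration order cannot leak into output
  }

  def main(args: Array[String]): Unit = {
    val m = mutable.HashMap("b" -> List("y", "x"), "a" -> List("z"))
    println(render(m)) // Map(a -> List(z), b -> List(x, y))
  }
}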
val additionalDefs: mutable.HashSet[AbstractFile] = mutable.HashSet.empty
val defs = compiler.dependencyAnalysis.definitions
@@ -123,7 +150,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
val syms = defs(src)
for (sym <- syms) {
definitions(src).find(
- s => (s.sym.fullNameString == sym.fullNameString) &&
+ s => (s.sym.fullName == sym.fullName) &&
isCorrespondingSym(s.sym, sym)) match {
case Some(SymWithHistory(oldSym, info)) =>
val changes = changeSet(oldSym.info, sym)
@@ -131,7 +158,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
atPhase(currentRun.erasurePhase.prev) {
changeSet(info, sym)
}
- changesOf(oldSym) = (changes ++ changesErasure).removeDuplicates
+ changesOf(oldSym) = (changes ++ changesErasure).distinct
case _ =>
// a new top level definition
changesOf(sym) =
@@ -142,13 +169,14 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
}
// Create a change for the top level classes that were removed
val removed = definitions(src) filterNot ((s:SymWithHistory) =>
- syms.find(_.fullNameString == (s.sym.fullNameString)) != None)
+ syms.find(_.fullName == (s.sym.fullName)) != None)
for (s <- removed) {
changesOf(s.sym) = List(removeChangeSet(s.sym))
}
}
}
- println("Changes: " + changesOf)
+ if (settings.Ybuildmanagerdebug.value)
+ compiler.inform("Changes: " + changesOf)
updateDefinitions(files)
val invalid = invalidated(files, changesOf, additionalDefs)
update0(checkCycles(invalid, files, coll))
@@ -196,18 +224,20 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
compiler.dependencyAnalysis.dependencies.dependentFiles(1, files)
def invalidate(file: AbstractFile, reason: String, change: Change) = {
- println("invalidate " + file + " because " + reason + " [" + change + "]")
+ if (settings.Ybuildmanagerdebug.value)
+ compiler.inform("invalidate " + file + " because " + reason + " [" + change + "]")
buf += file
directDeps -= file
for (syms <- definitions(file)) // fixes #2557
- newChangesOf(syms.sym) = List(change)
+ newChangesOf(syms.sym) = List(change, parentChangeSet(syms.sym))
break
}
for ((oldSym, changes) <- changesOf; change <- changes) {
def checkParents(cls: Symbol, file: AbstractFile) {
- val parentChange = cls.info.parents.exists(_.typeSymbol.fullNameString == oldSym.fullNameString)
- // println("checkParents " + cls + " oldSym: " + oldSym + " parentChange: " + parentChange + " " + cls.info.parents)
+ val parentChange = cls.info.parents.exists(_.typeSymbol.fullName == oldSym.fullName)
+ // if (settings.buildmanagerdebug.value)
+ // compiler.inform("checkParents " + cls + " oldSym: " + oldSym + " parentChange: " + parentChange + " " + cls.info.parents)
change match {
case Changed(Class(_)) if parentChange =>
invalidate(file, "parents have changed", change)
@@ -228,10 +258,10 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
def checkInterface(cls: Symbol, file: AbstractFile) {
change match {
case Added(Definition(name)) =>
- if (cls.info.decls.iterator.exists(_.fullNameString == name))
+ if (cls.info.decls.iterator.exists(_.fullName == name))
invalidate(file, "of new method with existing name", change)
case Changed(Class(name)) =>
- if (cls.info.typeSymbol.fullNameString == name)
+ if (cls.info.typeSymbol.fullName == name)
invalidate(file, "self type changed", change)
case _ =>
()
@@ -239,7 +269,8 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
}
def checkReferences(file: AbstractFile) {
- // println(file + ":" + references(file))
+ //if (settings.buildmanagerdebug.value)
+ // compiler.inform(file + ":" + references(file))
val refs = references(file)
if (refs.isEmpty)
invalidate(file, "it is a direct dependency and we don't yet have finer-grained dependency information", change)
@@ -260,11 +291,28 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
}
}
+ def checkInheritedReferences(file: AbstractFile) {
+ val refs = inherited(file)
+ if (!refs.isEmpty)
+ change match {
+ case ParentChanged(Class(name)) =>
+ for (Inherited(q, member) <- refs.find(p => (p != null && p.qualifier == name));
+ classFile <- classes.get(q);
+ defs <- definitions.get(classFile);
+ s <- defs.find(p => p.sym.fullName == q)
+ if ((s.sym).tpe.nonPrivateMember(member) == compiler.NoSymbol))
+ invalidate(file, "it references invalid (no longer inherited) definition", change)
+ ()
+ case _ => ()
+ }
+ }
+
for (file <- directDeps) {
breakable {
for (cls <- definitions(file)) checkParents(cls.sym, file)
for (cls <- definitions(file)) checkInterface(cls.sym, file)
checkReferences(file)
+ checkInheritedReferences(file)
}
}
}
@@ -278,6 +326,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
private def updateDefinitions(files: Set[AbstractFile]) {
for (src <- files; val localDefs = compiler.dependencyAnalysis.definitions(src)) {
definitions(src) = (localDefs map (s => {
+ this.classes += s.fullName -> src
SymWithHistory(
s.cloneSymbol,
atPhase(currentRun.erasurePhase.prev) {
@@ -286,6 +335,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
}))
}
this.references = compiler.dependencyAnalysis.references
+ this.inherited = compiler.dependencyAnalysis.inherited
}
/** Load saved dependency information. */
diff --git a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala b/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala
index d66cbb7818..a998f9dfc8 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala
@@ -7,6 +7,7 @@ package scala.tools.nsc
package interpreter
import java.io.File
+import java.lang.reflect
import java.util.jar.{ JarEntry, JarFile }
import java.util.concurrent.ConcurrentHashMap
import util.ScalaClassLoader.getSystemLoader
@@ -15,17 +16,43 @@ object ByteCode {
/** Until I figure out why I can't get scalap onto the classpath such
* that the compiler will bootstrap, we have to use reflection.
*/
- private lazy val DECODE: Option[String => Option[Map[String, String]]] =
- for (clazz <- getSystemLoader.tryToLoadClass[AnyRef]("scala.tools.scalap.Decode$")) yield {
- val module = clazz.getField("MODULE$").get()
- val method = clazz.getMethod("typeAliases", classOf[String])
- val map = method.invoke(module, _: String).asInstanceOf[Option[Map[String, String]]]
- map
+ private lazy val DECODER: Option[AnyRef] =
+ for (clazz <- getSystemLoader.tryToLoadClass[AnyRef]("scala.tools.scalap.Decode$")) yield
+ clazz.getField("MODULE$").get()
+
+ private def decoderMethod(name: String, args: Class[_]*): Option[reflect.Method] = {
+ for (decoder <- DECODER ; m <- Option(decoder.getClass.getMethod(name, args: _*))) yield m
+ }
+
+ private lazy val aliasMap = {
+ for (module <- DECODER ; method <- decoderMethod("typeAliases", classOf[String])) yield
+ method.invoke(module, _: String).asInstanceOf[Option[Map[String, String]]]
+ }
+
+ /** Scala sig bytes.
+ */
+ def scalaSigBytesForPath(path: String) =
+ for {
+ module <- DECODER
+ method <- decoderMethod("scalaSigBytes", classOf[String], classOf[ClassLoader])
+ names <- method.invoke(module, path, this.getClass.getClassLoader).asInstanceOf[Option[Array[Byte]]]
}
+ yield names
- def aliasesForPackage(pkg: String) = DECODE flatMap (_(pkg))
+ /** Attempts to retrieve case parameter names for given class name.
+ */
+ def caseParamNamesForPath(path: String) =
+ for {
+ module <- DECODER
+ method <- decoderMethod("caseParamNames", classOf[String])
+ names <- method.invoke(module, path).asInstanceOf[Option[List[String]]]
+ }
+ yield names
- /** Use scalap to look through type aliases */
+ def aliasesForPackage(pkg: String) = aliasMap flatMap (_(pkg))
+
+ /** Attempts to find type aliases in package objects.
+ */
def aliasForType(path: String): Option[String] = {
val (pkg, name) = (path lastIndexOf '.') match {
case -1 => return None
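// A minimal illustrative sketch (not from the patch): the reflective lookup pattern used above —
// load a Scala object by its $-suffixed class name, read MODULE$, and resolve methods lazily,
// treating every step as optional. The class and method names here are examples only.
import java.lang.reflect.Method

object ReflectiveDecoderSketch {
  private def loadModule(className: String): Option[AnyRef] =
    try Some(Class.forName(className).getField("MODULE$").get(null))
    catch { case _: Exception => None }

  private def method(module: AnyRef, name: String, args: Class[_]*): Option[Method] =
    try Some(module.getClass.getMethod(name, args: _*))
    catch { case _: NoSuchMethodException => None }

  def main(args: Array[String]): Unit = {
    val resolved =
      for {
        module <- loadModule("scala.Predef$")
        m      <- method(module, "println", classOf[Object])
      } yield m.getName
    println(resolved) // Some(println); a missing class or method would simply yield None
  }
}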
diff --git a/src/compiler/scala/tools/nsc/interpreter/Completion.scala b/src/compiler/scala/tools/nsc/interpreter/Completion.scala
index 2b9538b3fc..b62de995c2 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Completion.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Completion.scala
@@ -2,7 +2,6 @@
* Copyright 2005-2010 LAMP/EPFL
* @author Paul Phillips
*/
-// $Id$
//
// TODO, if practical:
@@ -11,310 +10,180 @@
// Possible approach: evaluate buffer as if current identifier is
// 2) Implicits: x.<tab> should show not only x's members but those of anything for which
// there is an implicit conversion from x.
-// 3) Chaining: x.foo(bar).<tab> should complete on foo's result type.
// 4) Imports: after import scala.collection.mutable._, HashMap should be among
// my top level identifiers.
-// 5) Caching: it's silly to parse all the jars on every startup, we should have
-// a peristent store somewhere we can write and only check last-mod dates.
-// 6) Security: Are we using the filesystem unnecessarily?
-//
+// 5) Caching: parsing the jars every startup seems wasteful, but experimentally
+// there is little to no gain from caching.
package scala.tools.nsc
package interpreter
import jline._
import java.net.URL
-import java.util.concurrent.ConcurrentHashMap
-import scala.concurrent.DelayedLazyVal
-import scala.collection.mutable.HashSet
-import scala.util.NameTransformer.{ decode, encode }
-
-// REPL completor - queries supplied interpreter for valid completions
-// based on current contents of buffer.
-class Completion(
- val interpreter: Interpreter,
- val intLoop: InterpreterLoop)
-extends Completor {
- def this(interpreter: Interpreter) = this(interpreter, null)
-
- import Completion._
- import java.util.{ List => JList }
- import interpreter.compilerClasspath
-
- // it takes a little while to look through the jars so we use a future and a concurrent map
- class CompletionAgent {
- val dottedPaths = new ConcurrentHashMap[String, List[CompletionInfo]]
- val topLevelPackages = new DelayedLazyVal(
- () => enumToList(dottedPaths.keys) filterNot (_ contains '.'),
- getDottedPaths(dottedPaths, interpreter)
- )
- }
- val agent = new CompletionAgent
- import agent._
-
- import java.lang.reflect.Modifier.{ isPrivate, isProtected, isPublic, isStatic }
- private def isSingleton(x: Int, isJava: Boolean) = !isJava || isStatic(x)
- private def existsAndPublic(s: String): Boolean =
- (dottedPaths containsKey s) || {
- val clazz =
- try Class.forName(s)
- catch { case _: ClassNotFoundException | _: SecurityException => return false }
-
- isPublic(clazz.getModifiers)
- }
+import java.util.{ List => JList }
+import java.lang.reflect
+import scala.tools.util.PathResolver
+import io.{ Path, Directory }
- // One instance of a command line
- class Buffer(s: String, verbose: Boolean) {
- val buffer = if (s == null) "" else s
- def isEmptyBuffer = buffer == ""
+object Completion {
+ // methods to leave out of completion
+ val excludeMethods = List("hashCode", "equals", "wait", "notify", "notifyAll")
- val segments = buffer.split("\\.", -1).toList
- val lastDot = buffer.lastIndexOf('.')
- val hasDot = segments.size > 0 && segments.last == ""
+  // strings to look for and exclude by default
+ val excludeStrings = List("$$super", "MODULE$")
- // given foo.bar.baz, path = foo.bar and stub = baz
- val (path, stub) = segments.size match {
- case 0 => ("", "")
- case 1 => (segments.head, "")
- case _ => (segments.init.mkString("."), segments.last)
- }
+ def looksLikeInvocation(code: String) = (
+ (code != null)
+ && (code startsWith ".")
+ && !(code startsWith "./")
+ && !(code startsWith "..")
+ )
- def filt(xs: List[String]) = xs filter (_ startsWith stub)
+ trait Forwarder extends CompletionAware {
+ def forwardTo: Option[CompletionAware]
- case class Result(candidates: List[String], position: Int) {
- def getCandidates() = (candidates map (_.trim) removeDuplicates) sortWith (_ < _)
+ override def completions() = forwardTo map (_.completions()) getOrElse Nil
+ override def follow(s: String) = forwardTo flatMap (_ follow s)
+ }
+}
+import Completion._
+
+// REPL completor - queries supplied interpreter for valid
+// completions based on current contents of buffer.
+class Completion(repl: Interpreter) {
+ self =>
+
+ private lazy val classPath = repl.compilerClasspath
+
+ // the unqualified vals/defs/etc visible in the repl
+ val ids = new IdentCompletion(repl)
+ // the top level packages we know about
+ val pkgs = new PackageCompletion(classPath)
+ // members of Predef
+ val predef = new StaticCompletion(classOf[scala.Predef$]) {
+ override def filterNotFunction(s: String) = (
+ (s contains "2") ||
+ (s startsWith "wrap") ||
+ (s endsWith "Wrapper") ||
+ (s endsWith "Ops")
+ )
+ }
+ // members of scala.*
+ val scalalang = new pkgs.SubCompletor("scala") with Forwarder {
+ def forwardTo = pkgs follow "scala"
+ val arityClasses = {
+ val names = List("Tuple", "Product", "Function")
+ val expanded = for (name <- names ; index <- 0 to 22 ; dollar <- List("", "$")) yield name + index + dollar
+
+ Set(expanded: _*)
}
- // work out completion candidates and position
- def analyzeBuffer(clist: JList[String]): Result = {
- lazy val ids = idsStartingWith(path)
- lazy val pkgs = pkgsStartingWith(path)
- lazy val count = (ids ::: pkgs).size
+ override def filterNotFunction(s: String) = {
+ val simple = s.reverse takeWhile (_ != '.') reverse
- def doSimple(): Result = count match {
- case 0 => Result(Nil, 0)
- case 1 if pkgs.size > 0 => Result(pkgs, 0)
- case 1 if buffer.length < ids.head.length => Result(ids, 0)
- case 1 => Result(ids, 0)
- // XXX for now commented out "dot inference" because it's overcomplicated
- // val members = membersOfId(ids.head) filter (_ startsWith stub)
- // if (members.isEmpty) Result(Nil, 0)
- // else Result(members, path.length + 1)
- case _ => Result(ids ::: pkgs, 0)
- }
-
- def doDotted(): Result = {
- def pkgs = membersOfPath(path)
- def ids = membersOfId(path)
- def idExtras = List("isInstanceOf", "asInstanceOf", "toString")
- def statics = completeStaticMembers(path)
- def pkgMembers = completePackageMembers(path)
-
- def calcList = if (pkgs.isEmpty) ids ::: idExtras ::: statics else pkgs
- def idList = filt(calcList ::: pkgMembers)
-
- Result(idList.removeDuplicates, path.length + 1)
- }
-
- segments.size match {
- case 0 => Result(Nil, 0)
- case 1 => doSimple()
- case _ => doDotted()
- }
+ (arityClasses contains simple) ||
+ (s endsWith "Exception") ||
+ (s endsWith "Error")
}
+ }
+ // members of java.lang.*
+ val javalang = new pkgs.SubCompletor("java.lang") with Forwarder {
+ def forwardTo = pkgs follow "java.lang"
+ import reflect.Modifier.isPublic
+ private def existsAndPublic(s: String): Boolean = {
+ val name = if (s contains ".") s else "java.lang." + s
+ val clazz = classForName(name) getOrElse (return false)
- def isValidId(s: String) = interpreter.unqualifiedIds contains s
- def membersOfId(s: String) = interpreter membersOfIdentifier s
- def membersOfPath(s: String) = {
- val xs =
- if (dottedPaths containsKey s) dottedPaths get s map (_.visibleName)
- else Nil
-
- s match {
- case "scala" => xs filterNot scalaToHide
- case "java.lang" => xs filterNot javaLangToHide
- case _ => xs
- }
+ isPublic(clazz.getModifiers)
}
- def membersOfPredef() = membersOfId("scala.Predef")
-
- def javaLangToHide(s: String) = (
+ override def filterNotFunction(s: String) = {
(s endsWith "Exception") ||
(s endsWith "Error") ||
(s endsWith "Impl") ||
- (s startsWith "CharacterData") ||
- !existsAndPublic("java.lang." + s)
- )
-
- def scalaToHide(s: String) =
- (List("Tuple", "Product", "Function") exists (x => (x + """\d+""").r findPrefixMatchOf s isDefined)) ||
- (List("Exception", "Error") exists (s endsWith _))
-
- /** Hide all default members not verbose */
- def defaultMembers =
- if (verbose) (List("scala", "java.lang") flatMap membersOfPath) ::: membersOfPredef
- else Nil
-
- def pkgsStartingWith(s: String) = topLevelPackages() filter (_ startsWith s)
- def idsStartingWith(s: String) = {
- // only print res* when verbose
- val unqIds =
- if (verbose) interpreter.unqualifiedIds
- else interpreter.unqualifiedIds filterNot (_ startsWith INTERPRETER_VAR_PREFIX)
-
- (unqIds ::: defaultMembers) filter (_ startsWith s)
- }
-
- def complete(clist: JList[String]): Int = {
- val res = analyzeBuffer(clist)
- res.getCandidates foreach (x => clist add decode(x))
- res.position
+ (s startsWith "CharacterData")
}
+ override def completions() = super.completions() filter existsAndPublic
}
-
- private def getMembers(c: Class[_], isJava: Boolean): List[String] =
- c.getMethods.toList .
- filter (x => isPublic(x.getModifiers)) .
- filter (x => isSingleton(x.getModifiers, isJava)) .
- map (_.getName) .
- filterNot (shouldHide)
-
- private def getClassObject(path: String): Option[Class[_]] =
- (interpreter getClassObject path) orElse
- (interpreter getClassObject ("scala." + path)) orElse
- (interpreter getClassObject ("java.lang." + path))
-
- def lastHistoryItem =
- for (loop <- Option(intLoop) ; h <- loop.history) yield
- h.getHistoryList.get(h.size - 1)
-
- // Is the buffer the same it was last time they hit tab?
- private var lastTab: (String, String) = (null, null)
-
- // jline's completion comes through here - we ask a Buffer for the candidates.
- override def complete(_buffer: String, cursor: Int, candidates: JList[String]): Int = {
- // println("_buffer = %s, cursor = %d".format(_buffer, cursor))
- val verbose = (_buffer, lastHistoryItem orNull) == lastTab
- lastTab = (_buffer, lastHistoryItem orNull)
-
- new Buffer(_buffer, verbose) complete candidates
+ val literals = new LiteralCompletion {
+ lazy val global = repl.compiler
+ val parent = self
}
- def completePackageMembers(path: String): List[String] =
- getClassObject(path + "." + "package") map (getMembers(_, false)) getOrElse Nil
-
- def completeStaticMembers(path: String): List[String] = {
- // java style, static methods
- val js = getClassObject(path) map (getMembers(_, true)) getOrElse Nil
- // scala style, methods on companion object
- // if getClassObject fails, see if there is a type alias
- val clazz = getClassObject(path + "$") orElse {
- (ByteCode aliasForType path) flatMap (x => getClassObject(x + "$"))
- }
- val ss = clazz map (getMembers(_, false)) getOrElse Nil
-
- js ::: ss
+ def lastResult = new Forwarder {
+ def forwardTo = ids follow repl.mostRecentVar
}
-}
-
-object Completion
-{
- import java.io.File
- import java.util.jar.{ JarEntry, JarFile }
- import scala.tools.nsc.io.Streamable
-
- val EXPAND_SEPARATOR_STRING = "$$"
- val ANON_CLASS_NAME = "$anon"
- val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
- val IMPL_CLASS_SUFFIX ="$class"
- val INTERPRETER_VAR_PREFIX = "res"
- case class CompletionInfo(visibleName: String, className: String, jar: String) {
- lazy val jarfile = new JarFile(jar)
- lazy val entry = jarfile getEntry className
-
- override def hashCode = visibleName.hashCode
- override def equals(other: Any) = other match {
- case x: CompletionInfo => visibleName == x.visibleName
- case _ => false
- }
-
- def getBytes(): Array[Byte] = {
- if (entry == null) Array() else {
- val x = new Streamable.Bytes { def inputStream() = jarfile getInputStream entry }
- x.toByteArray()
- }
- }
+ def lastResultFor(parsed: Parsed) = {
+ /** The logic is a little tortured right now because normally '.' is
+ * ignored as a delimiter, but on .<tab> it needs to be propagated.
+ */
+ val xs = lastResult completionsFor parsed
+ if (parsed.isEmpty) xs map ("." + _) else xs
}
- def enumToList[T](e: java.util.Enumeration[T]): List[T] = enumToList(e, Nil)
- def enumToList[T](e: java.util.Enumeration[T], xs: List[T]): List[T] =
- if (e == null || !e.hasMoreElements) xs else enumToList(e, e.nextElement :: xs)
-
- // methods to leave out of completion
- val excludeMethods = List("", "hashCode", "equals", "wait", "notify", "notifyAll")
-
- private def exists(path: String) = new File(path) exists
+ // the list of completion aware objects which should be consulted
+ val topLevel: List[CompletionAware] = List(ids, pkgs, predef, scalalang, javalang, literals)
- def shouldHide(x: String) =
- (excludeMethods contains x) ||
- (x contains ANON_CLASS_NAME) ||
- (x contains TRAIT_SETTER_SEPARATOR_STRING) ||
- (x endsWith IMPL_CLASS_SUFFIX)
+ // the first tier of top level objects (doesn't include file completion)
+ def topLevelFor(parsed: Parsed) = topLevel flatMap (_ completionsFor parsed)
- def getClassFiles(path: String): List[String] = {
- if (!exists(path)) return Nil
+ // chasing down results which won't parse
+ def execute(line: String): Option[Any] = {
+ val parsed = Parsed(line)
+ def noDotOrSlash = line forall (ch => ch != '.' && ch != '/')
- (enumToList(new JarFile(path).entries) map (_.getName)) .
- partialMap { case x: String if x endsWith ".class" => x dropRight 6 } .
- filterNot { shouldHide }
+ if (noDotOrSlash) None // we defer all unqualified ids to the repl.
+ else {
+ (ids executionFor parsed) orElse
+ (pkgs executionFor parsed) orElse
+ (FileCompletion executionFor line)
+ }
}
- // all the dotted path to classfiles we can find by poking through the jars
- def getDottedPaths(
- map: ConcurrentHashMap[String, List[CompletionInfo]],
- interpreter: Interpreter): Unit =
- {
- val cp =
- interpreter.compilerClasspath.map(_.getPath) ::: // compiler jars, scala-library.jar etc.
- interpreter.settings.bootclasspath.value.split(':').toList // boot classpath, java.lang.* etc.
-
- val jars = cp.removeDuplicates filter (_ endsWith ".jar")
-
- // for e.g. foo.bar.baz.C, returns (foo -> bar), (foo.bar -> baz), (foo.bar.baz -> C)
- // and scala.Range$BigInt needs to go scala -> Range -> BigInt
- def subpaths(s: String): List[(String, String)] = {
- val segs = decode(s).split("""[/.]""")
- val components = segs dropRight 1
-
- (1 to components.length).toList flatMap { i =>
- val k = components take i mkString "."
- if (segs(i) contains "$") {
- val dollarsegs = segs(i).split("$").toList
- for (j <- 1 to (dollarsegs.length - 1) toList) yield {
- val newk = k + "." + (dollarsegs take j mkString ".")
- (k -> dollarsegs(j))
- }
+ // override if history is available
+ def lastCommand: Option[String] = None
+
+ // jline's entry point
+ lazy val jline: ArgumentCompletor =
+ returning(new ArgumentCompletor(new JLineCompletion, new JLineDelimiter))(_ setStrict false)
+
+ class JLineCompletion extends Completor {
+ // For recording the buffer on the last tab hit
+ private var lastTab: (String, String) = (null, null)
+
+ // Does this represent two consecutive tabs?
+ def isConsecutiveTabs(buf: String) = (buf, lastCommand orNull) == lastTab
+
+ // verbosity goes up with consecutive tabs
+ // TODO - actually implement.
+ private var verbosity = 0
+
+ // This is jline's entry point for completion.
+ override def complete(buf: String, cursor: Int, candidates: JList[String]): Int = {
+ // println("complete: buf = %s, cursor = %d".format(buf, cursor))
+ verbosity = if (isConsecutiveTabs(buf)) verbosity + 1 else 0
+ lastTab = (buf, lastCommand orNull)
+
+ // we don't try lower priority completions unless higher ones return no results.
+ def tryCompletion(p: Parsed, completionFunction: Parsed => List[String]): Option[Int] = {
+ completionFunction(p) match {
+ case Nil => None
+ case xs =>
+ // modify in place and return the position
+ xs foreach (candidates add _)
+ Some(p.position)
}
- else List(k -> segs(i))
}
- }
- def oneJar(jar: String): Unit = {
- val classfiles = Completion getClassFiles jar
+ // a single dot is special cased to completion on the previous result
+ def lastResultCompletion =
+ if (!looksLikeInvocation(buf)) None
+ else tryCompletion(Parsed.dotted(buf drop 1, cursor), lastResultFor)
- for (cl <- classfiles.removeDuplicates ; (k, _v) <- subpaths(cl)) {
- val v = CompletionInfo(_v, cl, jar)
+ def regularCompletion = tryCompletion(Parsed.dotted(buf, cursor), topLevelFor)
+ def fileCompletion = tryCompletion(Parsed.undelimited(buf, cursor), FileCompletion completionsFor _.buffer)
- if (map containsKey k) {
- val vs = map.get(k)
- if (vs contains v) ()
- else map.put(k, v :: vs)
- }
- else map.put(k, List(v))
- }
+ (lastResultCompletion orElse regularCompletion orElse fileCompletion) getOrElse cursor
}
-
- jars foreach oneJar
}
}
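The new Completion wires several CompletionAware sources together and, in JLineCompletion.complete, tries them in priority order, falling back only when a higher-priority source produces nothing. A small self-contained sketch of that fallback strategy; the keyword and file sources below are invented for illustration.

    object PriorityCompletion {
      type Source = String => List[String]

      // Try each source in order; keep the first non-empty result, else Nil.
      def complete(buf: String, sources: List[Source]): List[String] =
        sources.view map (f => f(buf)) find (_.nonEmpty) getOrElse Nil

      def main(args: Array[String]): Unit = {
        val keywords: Source = s => List("case", "catch", "class") filter (_ startsWith s)
        val files: Source    = s => List("./README", "./build.xml") filter (_ startsWith s)

        println(complete("ca", List(keywords, files)))   // List(case, catch)
        println(complete("./b", List(keywords, files)))  // falls through to List(./build.xml)
      }
    }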
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
new file mode 100644
index 0000000000..7e94b687bf
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
@@ -0,0 +1,113 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import scala.reflect.NameTransformer
+
+/** An interface for objects which are aware of tab completion and
+ * will supply their own candidates and resolve their own paths.
+ */
+trait CompletionAware {
+ /** The delimiters which are meaningful when this CompletionAware
+ * object is in control.
+ */
+ // TODO
+ // def delimiters(): List[Char] = List('.')
+
+ /** The complete list of unqualified Strings to which this
+ * object will complete.
+ */
+ def completions(): List[String]
+ def completions(start: String): List[String] = completions filter (_ startsWith start)
+
+ /** Default filter to apply to completions.
+ */
+ def filterNotFunction(s: String): Boolean = false
+
+ /** Default sort.
+ */
+ def sortFunction(s1: String, s2: String): Boolean = s1 < s2
+
+ /** Default map.
+ */
+ def mapFunction(s: String) = NameTransformer decode s
+
+ /** The next completor in the chain.
+ */
+ def follow(id: String): Option[CompletionAware] = None
+
+ /** What to return if this completion is given as a command. It
+ * returns None by default, which means to allow the repl to interpret
+ * the line normally. Returning Some(_) means the line will never
+ * reach the scala interpreter.
+ */
+ def execute(id: String): Option[Any] = None
+
+ /** Given string 'buf', return a list of all the strings
+ * to which it can complete. This may involve delegating
+ * to other CompletionAware objects.
+ */
+ def completionsFor(parsed: Parsed): List[String] = {
+ import parsed._
+
+ val cs =
+ if (isEmpty) completions()
+ else if (isUnqualified && !isLastDelimiter) completions(buffer)
+ else follow(bufferHead) map (_ completionsFor bufferTail) getOrElse Nil
+
+ cs filterNot filterNotFunction map mapFunction sortWith (sortFunction _)
+ }
+
+ /** TODO - unify this and completionsFor under a common traverser.
+ */
+ def executionFor(parsed: Parsed): Option[Any] = {
+ import parsed._
+
+ if (isUnqualified && !isLastDelimiter && (completions contains buffer)) execute(buffer)
+ else if (!isQualified) None
+ else follow(bufferHead) flatMap (_ executionFor bufferTail)
+ }
+}
+
+object CompletionAware {
+ val Empty = new CompletionAware { val completions = Nil }
+
+ // class Forwarder(underlying: CompletionAware) extends CompletionAware {
+ // override def completions() = underlying.completions()
+ // override def filterNotFunction(s: String) = underlying.filterNotFunction(s)
+ // override def sortFunction(s1: String, s2: String) = underlying.sortFunction(s1, s2)
+ // override def mapFunction(s: String) = underlying.mapFunction(s)
+ // override def follow(id: String) = underlying.follow(id)
+ // override def execute(id: String) = underlying.execute(id)
+ // override def completionsFor(parsed: Parsed) = underlying.completionsFor(parsed)
+ // override def executionFor(parsed: Parsed) = underlying.executionFor(parsed)
+ // }
+ //
+
+ def unapply(that: Any): Option[CompletionAware] = that match {
+ case x: CompletionAware => Some((x))
+ case _ => None
+ }
+
+ /** Create a CompletionAware object from the given functions.
+ * The first should generate the list of completions whenever queried,
+ * and the second should return Some(CompletionAware) object if
+ * subcompletions are possible.
+ */
+ def apply(terms: () => List[String], followFunction: String => Option[CompletionAware]): CompletionAware =
+ new CompletionAware {
+ def completions = terms()
+ override def follow(id: String) = followFunction(id)
+ }
+
+ /** Convenience factories.
+ */
+ def apply(terms: () => List[String]): CompletionAware = apply(terms, _ => None)
+ def apply(map: collection.Map[String, CompletionAware]): CompletionAware =
+ apply(() => map.keys.toList, map.get _)
+}
+
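completionsFor above resolves a dotted buffer by following the leading segments via follow and filtering the final node's completions by the remaining stub. A toy model of that walk, independent of the repl classes (it ignores the isLastDelimiter subtleties of the real trait):

    trait Node {
      def completions: List[String]
      def follow(id: String): Option[Node] = None
    }

    object Walk {
      def completionsFor(root: Node, path: String): List[String] = {
        def loop(node: Node, segs: List[String]): List[String] = segs match {
          case Nil          => node.completions
          case stub :: Nil  => node.completions filter (_ startsWith stub)
          case head :: rest => node follow head map (loop(_, rest)) getOrElse Nil
        }
        loop(root, path.split('.').toList)
      }
    }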
diff --git a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala b/src/compiler/scala/tools/nsc/interpreter/Delimited.scala
new file mode 100644
index 0000000000..cdf5a343da
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/Delimited.scala
@@ -0,0 +1,36 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import jline.ArgumentCompletor.{ ArgumentDelimiter, ArgumentList }
+
+class JLineDelimiter extends ArgumentDelimiter {
+ def delimit(buffer: String, cursor: Int) = Parsed(buffer, cursor).asJlineArgumentList
+ def isDelimiter(buffer: String, cursor: Int) = Parsed(buffer, cursor).isDelimiter
+}
+
+trait Delimited {
+ self: Parsed =>
+
+ def delimited: Char => Boolean
+ def escapeChars: List[Char] = List('\\')
+ def quoteChars: List[(Char, Char)] = List(('\'', '\''), ('"', '"'))
+
+ /** Break String into args based on delimiting function.
+ */
+ protected def toArgs(s: String): List[String] =
+ if (s == "") Nil
+ else (s indexWhere isDelimiterChar) match {
+ case -1 => List(s)
+ case idx => (s take idx) :: toArgs(s drop (idx + 1))
+ }
+
+ def isDelimiterChar(ch: Char) = delimited(ch)
+ def isEscapeChar(ch: Char): Boolean = escapeChars contains ch
+ def isQuoteStart(ch: Char): Boolean = quoteChars map (_._1) contains ch
+ def isQuoteEnd(ch: Char): Boolean = quoteChars map (_._2) contains ch
+}
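toArgs above splits the buffer recursively at delimiter characters, keeping empty segments. The same function, copied into a runnable snippet with '.' as an example delimiter predicate:

    object ToArgsDemo {
      def toArgs(s: String, isDelim: Char => Boolean): List[String] =
        if (s == "") Nil
        else (s indexWhere isDelim) match {
          case -1  => List(s)
          case idx => (s take idx) :: toArgs(s drop (idx + 1), isDelim)
        }

      def main(args: Array[String]): Unit = {
        println(toArgs("foo.bar.baz", _ == '.'))  // List(foo, bar, baz)
        println(toArgs("foo..baz", _ == '.'))     // List(foo, , baz) -- empty segment preserved
      }
    }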
diff --git a/src/compiler/scala/tools/nsc/interpreter/FileCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/FileCompletion.scala
new file mode 100644
index 0000000000..c564562a63
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/FileCompletion.scala
@@ -0,0 +1,54 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+/** TODO
+ * Spaces, dots, and other things in filenames are not correctly handled.
+ * space-escaping, knowing when we're inside quotes, etc. would be nice.
+ */
+
+import io.{ Directory, Path }
+
+/** This isn't 100% clean right now, but it works and is simple. Rather
+ * than delegate to new objects on each '/' in the path, we treat the
+ * buffer like a path and process it directly.
+ */
+object FileCompletion {
+ def executionFor(buffer: String): Option[Path] = {
+ val p = Path(buffer)
+ if (p.exists) Some(p) else None
+ }
+
+ private def fileCompletionForwarder(buffer: String, where: Directory): List[String] = {
+ completionsFor(where.path + buffer) map (_ stripPrefix where.path) toList
+ }
+
+ private def homeCompletions(buffer: String): List[String] = {
+ require(buffer startsWith "~/")
+ val home = Directory.Home getOrElse (return Nil)
+ fileCompletionForwarder(buffer.tail, home) map ("~" + _)
+ }
+ private def cwdCompletions(buffer: String): List[String] = {
+ require(buffer startsWith "./")
+ val cwd = Directory.Current getOrElse (return Nil)
+ fileCompletionForwarder(buffer.tail, cwd) map ("." + _)
+ }
+
+ def completionsFor(buffer: String): List[String] =
+ if (buffer startsWith "~/") homeCompletions(buffer)
+ else if (buffer startsWith "./") cwdCompletions(buffer)
+ else {
+ val p = Path(buffer)
+ val (dir, stub) =
+ // don't want /foo/. expanding "."
+ if (p.name == ".") (p.parent, ".")
+ else if (p.isDirectory) (p.toDirectory, "")
+ else (p.parent, p.name)
+
+ dir.list filter (_.name startsWith stub) map (_.path) toList
+ }
+}
\ No newline at end of file
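completionsFor above splits the buffer into a directory to list and a partial name to match against that directory's entries. A rough equivalent using plain java.io.File instead of the repl's io.Path wrapper (the '~' and './' handling of the real object is ignored here):

    import java.io.File

    object FileComplete {
      def completionsFor(buffer: String): List[String] = {
        val p = new File(buffer)
        // split into the directory to list and the partial name to match
        val (dir, stub) =
          if (p.isDirectory) (p, "")
          else (Option(p.getParentFile) getOrElse new File("."), p.getName)

        val entries = Option(dir.listFiles) map (_.toList) getOrElse Nil
        entries filter (_.getName startsWith stub) map (_.getPath)
      }

      def main(args: Array[String]): Unit =
        completionsFor("./src/co") foreach println
    }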
diff --git a/src/compiler/scala/tools/nsc/interpreter/History.scala b/src/compiler/scala/tools/nsc/interpreter/History.scala
new file mode 100644
index 0000000000..7bd4e89095
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/History.scala
@@ -0,0 +1,36 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import java.io.File
+import jline.{ ConsoleReader, History => JHistory }
+import scala.collection.JavaConversions.asBuffer
+import Properties.userHome
+
+/** Primarily, a wrapper for JLine's History.
+ */
+class History(val jhistory: JHistory) {
+ def asJavaList = jhistory.getHistoryList
+ def asList: List[String] = asBuffer(asJavaList).toList
+ def index = jhistory.getCurrentIndex
+
+ def grep(s: String) = asList filter (_ contains s)
+}
+
+object History {
+ val ScalaHistoryFile = ".scala_history"
+
+ def apply(reader: ConsoleReader): History =
+ if (reader == null) apply()
+ else new History(reader.getHistory)
+
+ def apply(): History = new History(
+ try new JHistory(new File(userHome, ScalaHistoryFile))
+ // do not store history if error
+ catch { case _: Exception => new JHistory() }
+ )
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/IdentCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/IdentCompletion.scala
new file mode 100644
index 0000000000..b0152dbbc6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/IdentCompletion.scala
@@ -0,0 +1,25 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+/** Top level identifiers visible in the repl. It immediately
+ * delegates to an InstanceCompletion.
+ */
+class IdentCompletion(repl: Interpreter) extends CompletionAware {
+ val INTERPRETER_VAR_PREFIX = "res"
+
+ def completions() = repl.unqualifiedIds ::: List("classOf")
+ override def follow(id: String) =
+ // XXX this will be nice but needs solidifying.
+ // (repl completionAwareImplicit id) orElse
+ if (completions contains id) {
+ (repl completionAware id) orElse {
+ repl clazzForIdent id map (x => new InstanceCompletion(x))
+ }
+ }
+ else None
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
index 500876bf69..b3957e1062 100644
--- a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
@@ -23,6 +23,13 @@ trait InteractiveReader {
catching(handler) { readOneLine(prompt) }
}
+ // override if history is available
+ def history: Option[History] = None
+ def historyList = history map (_.asList) getOrElse Nil
+
+ // override if completion is available
+ def completion: Option[Completion] = None
+
// hack necessary for OSX jvm suspension because read calls are not restarted after SIGTSTP
private def restartSystemCall(e: Exception): Boolean =
Properties.isMac && (e.getMessage == msgEINTR)
@@ -38,9 +45,12 @@ object InteractiveReader {
/** Create an interactive reader. Uses <code>JLineReader</code> if the
* library is available, but otherwise uses a <code>SimpleReader</code>.
*/
- def createDefault(interpreter: Interpreter, intLoop: InterpreterLoop = null): InteractiveReader =
- catching(exes: _*)
- . opt (new JLineReader(interpreter, intLoop))
- . getOrElse (new SimpleReader)
+ def createDefault(interpreter: Interpreter): InteractiveReader =
+ try new JLineReader(interpreter)
+ catch {
+ case e @ (_: Exception | _: NoClassDefFoundError) =>
+ // println("Failed to create JLineReader(%s): %s".format(interpreter, e))
+ new SimpleReader
+ }
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
index b13b54a716..34367eacea 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
@@ -8,34 +8,21 @@ package scala.tools.nsc
package interpreter
import java.io.File
-import jline.{ History, ConsoleReader, ArgumentCompletor }
+import jline.{ ConsoleReader, ArgumentCompletor, History => JHistory }
/** Reads from the console using JLine */
-class JLineReader(interpreter: Interpreter, intLoop: InterpreterLoop) extends InteractiveReader {
- def this() = this(null, null)
- def this(interpreter: Interpreter) = this(interpreter, null)
- def history: History = consoleReader.getHistory
+class JLineReader(interpreter: Interpreter) extends InteractiveReader {
+ def this() = this(null)
- val consoleReader = {
- val history =
- try new History(new File(System.getProperty("user.home"), ".scala_history"))
- // do not store history if error
- catch { case _: Exception => new History() }
+ override lazy val history = Some(History(consoleReader))
+ override lazy val completion = Option(interpreter) map (x => new Completion(x))
+ val consoleReader = {
val r = new jline.ConsoleReader()
- r setHistory history
+ r setHistory (History().jhistory)
r setBellEnabled false
-
- if (interpreter != null) {
- // have to specify all delimiters for completion to work nicely
- val delims = new ArgumentCompletor.AbstractArgumentDelimiter {
- val delimChars = "(){}[],`;'\" \t".toArray
- def isDelimiterChar(s: String, pos: Int) = delimChars contains s.charAt(pos)
- }
- val comp = new ArgumentCompletor(new Completion(interpreter, intLoop), delims)
- comp setStrict false
- r addCompletor comp
- // XXX make this use a setting
+ completion foreach { c =>
+ r addCompletor c.jline
r setAutoprintThreshhold 250
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/LiteralCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/LiteralCompletion.scala
new file mode 100644
index 0000000000..3b74549d27
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/LiteralCompletion.scala
@@ -0,0 +1,50 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import util.BatchSourceFile
+
+/** Literals, so we can pretend they are objects with methods.
+ */
+abstract class LiteralCompletion extends CompletionAware {
+ val parent: Completion
+ val global: Global
+
+ import global._
+
+ // TODO - figure out how to enumerate available implicit conversions.
+ // def richInt = new InstanceCompletion(classOf[scala.runtime.RichInt])
+
+ class PrimitiveCompletion(x: Type) extends CompletionAware {
+ lazy val completions = x.nonPrivateMembers map (_.name.toString)
+ override def follow(s: String) = {
+ val member = x.nonPrivateMembers find (_.name.toString == s)
+ member flatMap (m => Option(m.tpe)) map (_.resultType) map (x => new PrimitiveCompletion(x))
+ }
+ }
+
+ def simpleParse(code: String): Tree = {
+ val unit = new CompilationUnit(new BatchSourceFile("<console>", code))
+ val scanner = new syntaxAnalyzer.UnitParser(unit)
+
+ // only single statements
+ scanner.templateStatSeq(false) match {
+ case (_, List(t)) => t
+ case (_, x) => EmptyTree
+ }
+ }
+
+ def completions() = Nil
+ override def follow(id: String) = simpleParse(id) match {
+ case Literal(c @ Constant(_)) => Some(new PrimitiveCompletion(c.tpe))
+ // TODO - more AST trees.
+ // case Apply(fn @ Ident(name), args) =>
+ // classForName(name.toString) map (x => new StaticCompletion(x))
+ // None
+ case x => None
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/PackageCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/PackageCompletion.scala
new file mode 100644
index 0000000000..26ae4106c6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/PackageCompletion.scala
@@ -0,0 +1,187 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import java.net.URL
+import java.lang.reflect
+import java.util.concurrent.ConcurrentHashMap
+import io.{ Path, Directory, File, Streamable }
+import scala.tools.util.PathResolver.Defaults.scalaHomeDir
+import scala.concurrent.DelayedLazyVal
+import scala.reflect.NameTransformer.{ decode, encode }
+import PackageCompletion._
+
+/** Completion among all known packages. It examines the jars in a
+ * separate thread so as not to slow down startup. If it arrives at
+ * an object, it delegates to StaticCompletion for that object.
+ */
+class PackageCompletion(classpath: List[URL]) extends CompletionAware {
+ // it takes a little while to look through the jars so we use a future and a concurrent map
+ class CompletionAgent {
+ val dottedPaths: ConcurrentHashMap[String, List[CompletionInfo]] = new ConcurrentHashMap[String, List[CompletionInfo]]
+ val topLevelPackages = new DelayedLazyVal(
+ () => enumToList(dottedPaths.keys) filterNot (_ contains '.'),
+ getDottedPaths(dottedPaths, classpath)
+ )
+ }
+ val agent = new CompletionAgent
+ import agent._
+
+ def completions() = topLevelPackages()
+ override def follow(id: String) =
+ if (dottedPaths containsKey id) Some(new SubCompletor(id))
+ else None
+
+ class SubCompletor(root: String) extends CompletionAware {
+ // Look for a type alias
+ private def aliasCompletor(path: String): Option[CompletionAware] =
+ for (name <- ByteCode aliasForType path ; clazz <- classForName(name + "$")) yield
+ new StaticCompletion(clazz)
+
+ lazy val pkgObject = classForName(root + ".package$") map (x => new PackageObjectCompletion(x))
+ def pkgObjectMembers = pkgObject map (_ completionsFor Parsed("")) getOrElse Nil
+
+ private def infos = Option(dottedPaths get root) getOrElse Nil
+ def completions() = {
+ val xs = infos map (_.visibleName) filterNot (_ == "package")
+ xs ::: pkgObjectMembers
+ }
+
+ override def follow(segment: String): Option[CompletionAware] = {
+ PackageCompletion.this.follow(root + "." + segment) orElse {
+ for (CompletionInfo(`segment`, className) <- infos ; clazz <- classForName(className)) {
+ return Some(new StaticCompletion(clazz))
+ }
+
+ aliasCompletor(root + "." + segment)
+ }
+ }
+ override def toString = "SubCompletor(%s)" format root
+ }
+}
+
+object PackageCompletion {
+ import java.util.jar.{ JarEntry, JarFile }
+
+ val EXPAND_SEPARATOR_STRING = "$$"
+ val ANON_CLASS_NAME = "$anon"
+ val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
+  val IMPL_CLASS_SUFFIX = "$class"
+
+ def ignoreClassName(x: String) =
+ (x contains EXPAND_SEPARATOR_STRING) ||
+ (x contains ANON_CLASS_NAME) ||
+ (x contains TRAIT_SETTER_SEPARATOR_STRING) ||
+ (x endsWith IMPL_CLASS_SUFFIX) ||
+ (x matches """.*\$\d+$""")
+
+ def enumToList[T](e: java.util.Enumeration[T]): List[T] = enumToListInternal(e, Nil)
+ private def enumToListInternal[T](e: java.util.Enumeration[T], xs: List[T]): List[T] =
+ if (e == null || !e.hasMoreElements) xs else enumToListInternal(e, e.nextElement :: xs)
+
+ private def isClass(s: String) = s endsWith ".class"
+ private def processNames(xs: List[String]) = xs map (_ dropRight 6) filterNot ignoreClassName distinct
+
+ def getDirClassFiles(dir: Directory): List[String] =
+ processNames(dir.deepList() map (dir relativize _ path) filter isClass toList)
+
+ def getJarClassFiles(jar: File): List[String] =
+ if (!jar.exists) Nil
+ else processNames(enumToList(new JarFile(jar.path).entries) map (_.getName) filter isClass)
+
+ object CompletionInfo {
+ def unapply(that: Any) = that match {
+ case x: CompletionInfo => Some((x.visibleName, x.className))
+ case _ => None
+ }
+ }
+
+ abstract class CompletionInfo {
+ def visibleName: String
+ def className: String
+ def getBytes(): Array[Byte]
+
+ override def hashCode = visibleName.hashCode
+ override def equals(other: Any) = other match {
+ case x: CompletionInfo => visibleName == x.visibleName
+ case _ => false
+ }
+ }
+
+ case class DirCompletionInfo(visibleName: String, className: String, dir: Directory) extends CompletionInfo {
+ lazy val file = dir / File(className)
+
+ def getBytes(): Array[Byte] = try file.toByteArray() catch { case _: Exception => Array() }
+ }
+
+ case class JarCompletionInfo(visibleName: String, className: String, jar: File) extends CompletionInfo {
+ lazy val jarfile = new JarFile(jar.path)
+ lazy val entry = jarfile getEntry className
+
+ def getBytes(): Array[Byte] = {
+ if (entry == null) Array() else {
+ val x = new Streamable.Bytes { def inputStream() = jarfile getInputStream entry }
+ x.toByteArray()
+ }
+ }
+ }
+
+ // all the dotted path to classfiles we can find by poking through the jars
+ def getDottedPaths(map: ConcurrentHashMap[String, List[CompletionInfo]], classpath: List[URL]): Unit = {
+ val cp = classpath.distinct map (x => Path(x.getPath))
+ val jars = cp filter (_ hasExtension "jar") map (_.toFile)
+
+ /** If we process all dirs uncritically, someone who has '.' in their classpath and
+ * runs scala from the filesystem root directory will induce a traversal of their
+ * entire filesystem. We could apply some heuristics to avoid this, but for now we
+ * will look only in the scalaHome directories, which is most of what we want.
+ */
+ def isUnderScalaHome(d: Directory) = d.parents exists (_ == scalaHomeDir)
+ val dirs = cp collect { case x: Directory => x } filter isUnderScalaHome
+
+ // for e.g. foo.bar.baz.C, returns (foo -> bar), (foo.bar -> baz), (foo.bar.baz -> C)
+ // and scala.Range$BigInt needs to go scala -> Range -> BigInt
+ def subpaths(s: String): List[(String, String)] = {
+ val segs = decode(s).split("""[/.]""")
+ val components = segs dropRight 1
+
+ (1 to components.length).toList flatMap { i =>
+ val k = components take i mkString "."
+ if (segs(i) contains "$") {
+ val dollarsegs = segs(i).split("$").toList
+ for (j <- 1 to (dollarsegs.length - 1) toList) yield {
+ val newk = k + "." + (dollarsegs take j mkString ".")
+ (k -> dollarsegs(j))
+ }
+ }
+ else List(k -> segs(i))
+ }
+ }
+
+ def addToMap(key: String, info: CompletionInfo) = {
+ if (map containsKey key) {
+ val vs = map.get(key)
+ if (vs contains info) ()
+ else map.put(key, info :: vs)
+ }
+ else map.put(key, List(info))
+ }
+
+ def oneDir(dir: Directory) {
+ for (cl <- getDirClassFiles(dir) ; (k, v) <- subpaths(cl))
+ addToMap(k, DirCompletionInfo(v, cl, dir))
+ }
+
+ def oneJar(jar: File) {
+ for (cl <- getJarClassFiles(jar) ; (k, v) <- subpaths(cl))
+ addToMap(k, JarCompletionInfo(v, cl, jar))
+ }
+
+ jars foreach oneJar
+ dirs foreach oneDir
+ }
+}
\ No newline at end of file
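The comment on subpaths above gives the intended decomposition of a classfile path into (prefix -> next-segment) pairs. Here it is as a runnable snippet, minus the inner-class ('$') handling of the real code:

    object Subpaths {
      def subpaths(s: String): List[(String, String)] = {
        val segs = s.split("""[/.]""").toList
        val components = segs dropRight 1
        (1 to components.length).toList map { i =>
          (components take i mkString ".", segs(i))
        }
      }

      def main(args: Array[String]): Unit = {
        // prints List((foo,bar), (foo.bar,baz), (foo.bar.baz,C))
        println(subpaths("foo/bar/baz/C"))
      }
    }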
diff --git a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala b/src/compiler/scala/tools/nsc/interpreter/Parsed.scala
new file mode 100644
index 0000000000..b130396cc6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/Parsed.scala
@@ -0,0 +1,63 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import jline.ArgumentCompletor.{ ArgumentDelimiter, ArgumentList }
+
+/** One instance of a command buffer.
+ */
+class Parsed private (
+ val buffer: String,
+ val cursor: Int,
+ val delimited: Char => Boolean
+) extends Delimited {
+ def isEmpty = buffer == ""
+ def isUnqualified = args.size == 1
+ def isQualified = args.size > 1
+ def isAtStart = cursor <= 0
+
+ def args = toArgs(buffer take cursor).toList
+ def bufferHead = args.head
+ def headLength = bufferHead.length + 1
+ def bufferTail = new Parsed(buffer drop headLength, cursor - headLength, delimited)
+
+ def prev = new Parsed(buffer, cursor - 1, delimited)
+ def next = new Parsed(buffer, cursor + 1, delimited)
+ def currentChar = buffer(cursor)
+ def currentArg = args.last
+ def position =
+ if (isEmpty) 0
+ else if (isLastDelimiter) cursor
+ else cursor - currentArg.length
+
+ def isFirstDelimiter = !isEmpty && isDelimiterChar(buffer.head)
+ def isLastDelimiter = !isEmpty && isDelimiterChar(buffer.last)
+ def firstIfDelimiter = if (isFirstDelimiter) buffer.head.toString else ""
+ def lastIfDelimiter = if (isLastDelimiter) buffer.last.toString else ""
+
+ def isQuoted = false // TODO
+ def isEscaped = !isAtStart && isEscapeChar(currentChar) && !isEscapeChar(prev.currentChar)
+ def isDelimiter = !isQuoted && !isEscaped && isDelimiterChar(currentChar)
+
+ def asJlineArgumentList =
+ if (isEmpty) new ArgumentList(Array[String](), 0, 0, cursor)
+ else new ArgumentList(args.toArray, args.size - 1, currentArg.length, cursor)
+
+ override def toString = "Parsed(%s / %d)".format(buffer, cursor)
+}
+
+object Parsed {
+ def apply(s: String): Parsed = apply(onull(s), onull(s).length)
+ def apply(s: String, cursor: Int): Parsed = apply(onull(s), cursor, "(){},`; \t" contains _)
+ def apply(s: String, cursor: Int, delimited: Char => Boolean): Parsed =
+ new Parsed(onull(s), cursor, delimited)
+
+ def dotted(s: String): Parsed = dotted(onull(s), onull(s).length)
+ def dotted(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ == '.')
+
+ def undelimited(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ => false)
+}
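Parsed.position above returns the cursor itself when the buffer ends in a delimiter and otherwise backs up over the current stub. A tiny standalone model of that rule for a dot-delimited buffer (not the real class):

    object ParsedDemo {
      def position(buffer: String, cursor: Int): Int = {
        val visible = buffer take cursor
        if (visible.isEmpty) 0
        else if (visible.last == '.') cursor                         // right after the delimiter
        else cursor - (visible.reverse takeWhile (_ != '.')).length  // back up over the current stub
      }

      def main(args: Array[String]): Unit = {
        println(position("scala.coll", 10))  // 6: candidates replace "coll"
        println(position("scala.", 6))       // 6: candidates are appended after the dot
      }
    }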
diff --git a/src/compiler/scala/tools/nsc/interpreter/ProductCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/ProductCompletion.scala
new file mode 100644
index 0000000000..2aaa6114c2
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/ProductCompletion.scala
@@ -0,0 +1,42 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+class SeqCompletion[T](elems: Seq[T]) extends CompletionAware {
+ lazy val completions = elems.indices.toList map ("(%d)" format _)
+ private def elemAt(name: String) =
+ if (completions contains name) Some(elems(name drop 1 dropRight 1 toInt)) else None
+
+ override def execute(name: String) = elemAt(name)
+ override def follow(name: String) = elemAt(name) map (x => ProductCompletion(x))
+}
+
+/** TODO - deal with non-case products by giving them _1 _2 etc. */
+class ProductCompletion(root: Product) extends CompletionAware {
+ lazy val caseFields: List[Any] = root.productIterator.toList
+ lazy val caseNames: List[String] = ByteCode caseParamNamesForPath root.getClass.getName getOrElse Nil
+ private def isValid = caseFields.length == caseNames.length
+
+ private def fieldForName(s: String) = (completions indexOf s) match {
+ case idx if idx > -1 && isValid => Some(caseFields(idx))
+ case _ => None
+ }
+
+ lazy val completions = caseNames
+ override def execute(name: String) = fieldForName(name)
+ override def follow(name: String) = fieldForName(name) map (x => ProductCompletion(x))
+}
+
+object ProductCompletion {
+ /** TODO: other traversables. */
+ def apply(elem: Any): CompletionAware = elem match {
+ case x: Seq[_] => new SeqCompletion[Any](x)
+ case x: Product => new ProductCompletion(x)
+ // case x: Map[_, _] =>
+ case _ => CompletionAware.Empty
+ }
+}
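ProductCompletion pairs a case instance's productIterator values with the parameter names scalap reports. A self-contained illustration of that pairing, with the names written out literally since scalap is not consulted here:

    case class Person(name: String, age: Int)

    object ProductDemo {
      def main(args: Array[String]): Unit = {
        val p = Person("Ada", 36)
        val caseFields = p.productIterator.toList  // List(Ada, 36)
        val caseNames  = List("name", "age")       // supplied by scalap in ProductCompletion
        val byName     = (caseNames zip caseFields).toMap

        println(byName("age"))                     // 36 -- what follow("age") would resolve to
      }
    }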
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReflectionCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/ReflectionCompletion.scala
new file mode 100644
index 0000000000..89490119ff
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/ReflectionCompletion.scala
@@ -0,0 +1,126 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import java.lang.reflect
+import reflect.{ Modifier, AccessibleObject }
+import Modifier.{ isPrivate, isProtected, isStatic }
+import scala.reflect.NameTransformer
+import scala.collection.mutable.HashMap
+import ReflectionCompletion._
+import Completion.{ excludeMethods }
+
+trait ReflectionCompletion extends CompletionAware {
+ def clazz: Class[_]
+ protected def visibleMembers: List[AccessibleObject]
+ protected def memberCompletions = visibleMembers filter isPublic map reflectName
+
+ def reflectName(m: AccessibleObject) = m match {
+ case x: reflect.Method => x.getName
+ case x: reflect.Field => x.getName
+ case x => error(x.toString)
+ }
+ def isPublic(m: AccessibleObject) = m match {
+ case x: reflect.Method => Modifier isPublic x.getModifiers
+ case x: reflect.Field => Modifier isPublic x.getModifiers
+ case x => error(x.toString)
+ }
+
+ override def filterNotFunction(s: String): Boolean = {
+ (excludeMethods contains s) ||
+ (s contains "$$super") ||
+ (s == "MODULE$")
+ }
+
+ lazy val (staticMethods, instanceMethods) = clazz.getMethods.toList partition (x => isStatic(x.getModifiers))
+ lazy val (staticFields, instanceFields) = clazz.getFields.toList partition (x => isStatic(x.getModifiers))
+
+  /** Oops, mirror classes don't descend from ScalaObject.
+ */
+ def isScalaClazz(cl: Class[_]) = {
+ (allInterfacesFor(cl) exists (_.getName == "scala.ScalaObject")) ||
+ (classForName(cl.getName + "$").isDefined)
+ }
+ def allInterfacesFor(cl: Class[_]): List[Class[_]] = allInterfacesFor(cl, Nil)
+
+ private def allInterfacesFor(cl: Class[_], acc: List[Class[_]]): List[Class[_]] = {
+ if (cl == null) acc.distinct
+ else allInterfacesFor(cl.getSuperclass, acc ::: cl.getInterfaces.toList)
+ }
+}
+
+/** An instance completion which hides a few useless members.
+ */
+class PackageObjectCompletion(clazz: Class[_]) extends InstanceCompletion(clazz) {
+ override lazy val completions = memberCompletions
+ override def filterNotFunction(s: String) = {
+ super.filterNotFunction(s) || (s == "getClass") || (s == "toString")
+ }
+}
+
+/** A completion aware object representing a single instance of some class.
+ * It completes to instance fields and methods, and delegates to another
+ * InstanceCompletion object if it can determine the result type of the element.
+ */
+class InstanceCompletion(val clazz: Class[_]) extends ReflectionCompletion {
+ protected def visibleMembers = instanceMethods ::: instanceFields
+ def extras = List("isInstanceOf", "asInstanceOf", "toString")
+ lazy val completions = memberCompletions ::: extras
+
+ val (zeroArg, otherArg) = instanceMethods partition (_.getParameterTypes.size == 0)
+ override def follow(id: String) = {
+ val nextClazz = zeroArg find (m => m.getName == id) map (_.getReturnType)
+ if (nextClazz.isDefined) nextClazz map (x => new InstanceCompletion(x))
+ else instanceFields find (_.getName == id) map (x => new InstanceCompletion(x.getType))
+ }
+}
+
+/** The complementary class to InstanceCompletion. It has logic to deal with
+ * java static members and scala companion object members.
+ */
+class StaticCompletion(val clazz: Class[_]) extends ReflectionCompletion {
+ protected def visibleMembers = whichMethods ::: whichFields
+ lazy val completions = memberCompletions
+
+ private def aliasForPath(path: String) = ByteCode aliasForType path flatMap (x => classForName(x + "$"))
+ def className = clazz.getName
+ def isJava = !isScalaClazz(clazz)
+
+ private def whichMethods = if (isJava) staticMethods else instanceMethods
+ private def whichFields = if (isJava) staticFields else instanceFields
+ val (zeroArg, otherArg) = whichMethods partition (_.getParameterTypes.size == 0)
+
+ override def follow(id: String) = {
+ val nextClazz = zeroArg find (m => m.getName == id) map (_.getReturnType)
+ if (nextClazz.isDefined) nextClazz map (x => new InstanceCompletion(x))
+ else staticFields find (_.getName == id) map (x => new InstanceCompletion(x.getType))
+ }
+
+ override def toString = "StaticCompletion(%s) => %s".format(clazz.getName, completions)
+}
+
+object ReflectionCompletion {
+ import java.io.File
+ import java.util.jar.{ JarEntry, JarFile }
+ import scala.tools.nsc.io.Streamable
+
+ // XXX at the moment this is imperfect because scala's protected semantics
+ // differ from java's, so protected methods appear public via reflection;
+ // yet scala enforces the protection. The result is that protected members
+ // appear in completion yet cannot actually be called. Fixing this
+ // properly requires a scala.reflect.* API. Fixing it uglily is possible
+  // too (cast to structural type!) but I deem it a poor use of energy.
+ private def skipModifiers(m: reflect.Method) = {
+ import java.lang.reflect.Modifier._
+ val flags = STATIC | PRIVATE | PROTECTED
+ (m.getModifiers & flags) == 0
+ }
+ private def getAnyClass(x: Any): Class[_] = x.asInstanceOf[AnyRef].getClass
+
+ def methodsOf(target: Any): List[String] =
+ getAnyClass(target).getMethods filter skipModifiers map (_.getName) toList
+}
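methodsOf above keeps only members whose modifiers carry none of STATIC, PRIVATE or PROTECTED. The same mask, extracted into a runnable snippet:

    import java.lang.reflect.Modifier._

    object PublicInstanceMethods {
      def methodsOf(target: Any): List[String] = {
        val flags = STATIC | PRIVATE | PROTECTED
        val methods = target.asInstanceOf[AnyRef].getClass.getMethods.toList
        methods filter (m => (m.getModifiers & flags) == 0) map (_.getName)
      }

      def main(args: Array[String]): Unit =
        println(methodsOf("hello") filter (_ startsWith "to"))  // toString, toLowerCase, toUpperCase, ...
    }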
diff --git a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala b/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
index bca2e18e39..9d604ab8b3 100644
--- a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
@@ -8,6 +8,7 @@ package scala.tools.nsc
package interpreter
import java.io.{ BufferedReader, PrintWriter }
+import io.{ Path, File, Directory }
/** Reads using standard JDK API */
class SimpleReader(
@@ -16,7 +17,9 @@ class SimpleReader(
val interactive: Boolean)
extends InteractiveReader {
def this() = this(Console.in, new PrintWriter(Console.out), true)
+ def this(in: File, out: PrintWriter, interactive: Boolean) = this(in.bufferedReader(), out, interactive)
+ def close() = in.close()
def readOneLine(prompt: String): String = {
if (interactive) {
out.print(prompt)
diff --git a/src/compiler/scala/tools/nsc/interpreter/XMLCompletion.scala b/src/compiler/scala/tools/nsc/interpreter/XMLCompletion.scala
new file mode 100644
index 0000000000..67063192bd
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/XMLCompletion.scala
@@ -0,0 +1,43 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import xml.{ XML, Group, Node, NodeSeq }
+import XMLCompletion._
+import scala.collection.mutable.HashMap
+
+class XMLCompletion(root: Node) extends CompletionAware {
+ private val nodeCache = new HashMap[String, Node]
+ private def getNode(s: String): Option[Node] = {
+ completions // make sure cache is populated
+ nodeCache get s
+ }
+
+ lazy val completions: List[String] = {
+ def children = root.child.toList
+ def uniqueTags = children groupBy (_.label) filter (_._2.size == 1) map (_._1)
+ val uniqs = uniqueTags.toList
+
+ children.foldLeft(List[String]())((res, node) => {
+ val name = node.label
+ def count = res filter (_ startsWith (name + "[")) size // ]
+ val suffix = if (uniqs contains name) "" else "[%d]" format (count + 1)
+ val s = name + suffix
+
+ nodeCache(s) = node
+
+ s :: res
+ }).sorted
+ }
+
+ override def execute(id: String) = getNode(id)
+ override def follow(id: String) = getNode(id) map (x => new XMLCompletion(x))
+}
+
+object XMLCompletion {
+ def apply(x: Node) = new XMLCompletion(x)
+}
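completions above suffixes duplicate child labels with a 1-based index while unique labels stay bare. A sketch of that indexing scheme with plain strings standing in for xml child nodes:

    object LabelIndexing {
      def indexed(labels: List[String]): List[String] = {
        val counts = labels groupBy identity mapValues (_.length)
        val seen = scala.collection.mutable.Map[String, Int]() withDefaultValue 0

        labels map { name =>
          seen(name) += 1
          if (counts(name) == 1) name else "%s[%d]".format(name, seen(name))
        }
      }

      def main(args: Array[String]): Unit =
        println(indexed(List("title", "item", "item")))  // List(title, item[1], item[2])
    }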
diff --git a/src/compiler/scala/tools/nsc/interpreter/package.scala b/src/compiler/scala/tools/nsc/interpreter/package.scala
new file mode 100644
index 0000000000..2ded3a7900
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/interpreter/package.scala
@@ -0,0 +1,25 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+
+package object interpreter {
+ /** Apply a function and return the passed value */
+ def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
+
+ /** Tracing */
+ def tracing[T](msg: String)(x: T): T = { println("(" + msg + ") " + x) ; x }
+
+ /** Frequency counter */
+ def freq[T](seq: Seq[T]) = seq groupBy identity mapValues (_.length)
+
+ /** null becomes "", otherwise identity */
+ def onull(s: String) = if (s == null) "" else s
+
+ /** Class objects */
+ def classForName(name: String): Option[Class[_]] =
+ try Some(Class forName name)
+ catch { case _: ClassNotFoundException | _: SecurityException => None }
+}
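A short usage sketch for these helpers, with the definitions copied so it runs outside the repl package object:

    object HelpersDemo {
      def returning[T](x: T)(f: T => Unit): T = { f(x); x }
      def freq[T](seq: Seq[T]) = seq groupBy identity mapValues (_.length)
      def onull(s: String) = if (s == null) "" else s

      def main(args: Array[String]): Unit = {
        // returning: configure a value and hand it back in one expression
        val sb = returning(new StringBuilder)(_ append "configured")
        println(sb.toString)                     // configured

        println(freq("abracadabra".toSeq))       // character frequencies, e.g. a -> 5, r -> 2, ...
        println(onull(null) + "|" + onull("x"))  // |x
      }
    }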
diff --git a/src/compiler/scala/tools/nsc/io/AbstractFile.scala b/src/compiler/scala/tools/nsc/io/AbstractFile.scala
index 31073a0799..079b33c2a2 100644
--- a/src/compiler/scala/tools/nsc/io/AbstractFile.scala
+++ b/src/compiler/scala/tools/nsc/io/AbstractFile.scala
@@ -20,8 +20,6 @@ import scala.collection.mutable.ArrayBuffer
*/
object AbstractFile
{
- def isJarOrZip(f: Path) = cond(f.extension) { case "zip" | "jar" => true }
-
/** Returns "getFile(new File(path))". */
def getFile(path: String): AbstractFile = getFile(Path(path))
def getFile(path: Path): AbstractFile = getFile(path.toFile)
@@ -46,7 +44,7 @@ object AbstractFile
*/
def getDirectory(file: File): AbstractFile =
if (file.isDirectory) new PlainFile(file)
- else if (file.isFile && isJarOrZip(file)) ZipArchive fromFile file
+ else if (file.isFile && Path.isJarOrZip(file)) ZipArchive fromFile file
else null
/**
@@ -58,7 +56,7 @@ object AbstractFile
* @return ...
*/
def getURL(url: URL): AbstractFile =
- Option(url) partialMap { case url: URL if isJarOrZip(url.getPath) => ZipArchive fromURL url } orNull
+ Option(url) collect { case url: URL if Path.isJarOrZip(url.getPath) => ZipArchive fromURL url } orNull
}
/**
@@ -93,6 +91,9 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
/** Returns the path of this abstract file. */
def path: String
+ /** Checks extension case insensitively. */
+ def hasExtension(other: String) = Path(path) hasExtension other
+
/** The absolute file, if this is a relative file. */
def absolute: AbstractFile
@@ -101,13 +102,19 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
/** Returns the underlying File if any and null otherwise. */
def file: JFile
- def sfile = File(file) // XXX
+ def sfile = Option(file) map (x => File(x)) // XXX
+
+ /** An underlying source, if known. Mostly, a zip/jar file. */
+ def underlyingSource: Option[AbstractFile] = None
/** Does this abstract file denote an existing file? */
def exists: Boolean =
if (file ne null) file.exists
else true
+ /** Does this abstract file represent something which can contain classfiles? */
+ def isClassContainer = isDirectory || (sfile exists (Path isJarOrZip _))
+
/** Create a file on disk, if one does not exist already. */
def create: Unit
@@ -225,7 +232,7 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
*/
def fileNamed(name: String): AbstractFile = {
assert(isDirectory)
- Option(lookupName(name, false)) getOrElse new PlainFile((sfile / name).createFile())
+ Option(lookupName(name, false)) getOrElse new PlainFile((sfile.get / name).createFile())
}
/**
@@ -234,9 +241,12 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] {
*/
def subdirectoryNamed(name: String): AbstractFile = {
assert (isDirectory)
- Option(lookupName(name, true)) getOrElse new PlainFile((sfile / name).createDirectory())
+ Option(lookupName(name, true)) getOrElse new PlainFile((sfile.get / name).createDirectory())
}
+ protected def unsupported(): Nothing = unsupported(null)
+ protected def unsupported(msg: String): Nothing = throw new UnsupportedOperationException(msg)
+
/** Returns the path of this abstract file. */
override def toString() = path
diff --git a/src/compiler/scala/tools/nsc/io/Directory.scala b/src/compiler/scala/tools/nsc/io/Directory.scala
index 3431a49e2c..7c279a79e2 100644
--- a/src/compiler/scala/tools/nsc/io/Directory.scala
+++ b/src/compiler/scala/tools/nsc/io/Directory.scala
@@ -10,11 +10,16 @@ package scala.tools.nsc
package io
import java.io.{ File => JFile }
-import collection.Traversable
-object Directory
-{
- def apply(path: Path) = path.toDirectory
+object Directory {
+ import scala.util.Properties.{ tmpDir, userHome, userDir }
+
+ private def normalizePath(s: String) = Some(apply(Path(s).normalize))
+ def Current: Option[Directory] = if (userDir == "") None else normalizePath(userDir)
+ def Home: Option[Directory] = if (userHome == "") None else normalizePath(userHome)
+ def TmpDir: Option[Directory] = if (tmpDir == "") None else normalizePath(tmpDir)
+
+ def apply(path: Path): Directory = path.toDirectory
// Like File.makeTemp but creates a directory instead
def makeTemp(prefix: String = Path.randomPrefix, suffix: String = null, dir: JFile = null): Directory = {
@@ -30,11 +35,12 @@ import Path._
* @author Paul Phillips
* @since 2.8
*/
-class Directory(jfile: JFile) extends Path(jfile)
-{
+class Directory(jfile: JFile) extends Path(jfile) {
+ override def toAbsolute: Directory = if (isAbsolute) this else super.toAbsolute.toDirectory
override def toDirectory: Directory = this
override def toFile: File = new File(jfile)
override def isValid = jfile.isDirectory() || !jfile.exists()
+ override def normalize: Directory = super.normalize.toDirectory
/** An iterator over the contents of this directory.
*/
@@ -44,8 +50,14 @@ class Directory(jfile: JFile) extends Path(jfile)
case xs => xs.iterator map Path.apply
}
- def dirs: Iterator[Directory] = list partialMap { case x: Directory => x }
- def files: Iterator[File] = list partialMap { case x: File => x }
+ def dirs: Iterator[Directory] = list collect { case x: Directory => x }
+ def files: Iterator[File] = list collect { case x: File => x }
+
+ override def walkFilter(cond: Path => Boolean): Iterator[Path] =
+ list filter cond flatMap (_ walkFilter cond)
+
+ def deepDirs: Iterator[Directory] = Path.onlyDirs(deepList())
+ def deepFiles: Iterator[File] = Path.onlyFiles(deepList())
/** If optional depth argument is not given, will recurse
* until it runs out of contents.
@@ -59,7 +71,7 @@ class Directory(jfile: JFile) extends Path(jfile)
* to the (optionally) given depth.
*/
def subdirs(depth: Int = 1): Iterator[Directory] =
- deepList(depth) partialMap { case x: Directory => x }
+ deepList(depth) collect { case x: Directory => x }
/** Deletes the directory recursively. Returns false on failure.
* Use with caution!
@@ -72,6 +84,4 @@ class Directory(jfile: JFile) extends Path(jfile)
}
f.delete()
}
-
- override def toString() = "Directory(%s)".format(path)
}
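
A usage sketch for the Directory additions above (Current/Home/TmpDir, dirs/files, deepFiles); it assumes this io package is on the classpath and that the working directory is readable:

    import scala.tools.nsc.io.Directory

    object DirectoryWalkSketch {
      def main(args: Array[String]): Unit =
        // Current is an Option because the user.dir property can be unset.
        Directory.Current foreach { cwd =>
          println("subdirectories: " + cwd.dirs.size)
          println("files:          " + cwd.files.size)
          // deepFiles recurses; hasExtension compares case-insensitively.
          cwd.deepFiles filter (_ hasExtension "scala") foreach (f => println(f.path))
        }
    }
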
diff --git a/src/compiler/scala/tools/nsc/io/File.scala b/src/compiler/scala/tools/nsc/io/File.scala
index 7f5f535da4..887bf4b55d 100644
--- a/src/compiler/scala/tools/nsc/io/File.scala
+++ b/src/compiler/scala/tools/nsc/io/File.scala
@@ -13,14 +13,13 @@ package io
import java.io.{
FileInputStream, FileOutputStream, BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter,
- BufferedInputStream, BufferedOutputStream, IOException, File => JFile }
-import java.nio.channels.FileChannel
-import collection.Traversable
+ BufferedInputStream, BufferedOutputStream, IOException, PrintStream, File => JFile }
+import java.nio.channels.{ Channel, FileChannel }
import scala.io.Codec
-object File
-{
+object File {
def pathSeparator = JFile.pathSeparator
+ def separator = JFile.separator
def apply(path: Path)(implicit codec: Codec = null) =
if (codec != null) new File(path.jfile)(codec)
@@ -30,11 +29,21 @@ object File
def makeTemp(prefix: String = Path.randomPrefix, suffix: String = null, dir: JFile = null) =
apply(JFile.createTempFile(prefix, suffix, dir))
- import java.nio.channels.Channel
type Closeable = { def close(): Unit }
def closeQuietly(target: Closeable) {
try target.close() catch { case e: IOException => }
}
+
+ // this is a workaround for http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6503430
+ // we are using a static initializer to statically initialize a java class so we don't
+ // trigger java.lang.InternalErrors later when using it concurrently.
+ {
+ val tmp = JFile.createTempFile("bug6503430", null, null)
+ val in = new FileInputStream(tmp).getChannel()
+ val out = new FileOutputStream(tmp, true).getChannel()
+ out.transferFrom(in, 0, 0)
+ ()
+ }
}
import File._
import Path._
@@ -42,7 +51,7 @@ import Path._
/** An abstraction for files. For character data, a Codec
* can be supplied at either creation time or when a method
* involving character data is called (with the latter taking
- * precdence if supplied.) If neither is available, the value
+ * precedence if supplied.) If neither is available, the value
* of scala.io.Codec.default is used.
*
* @author Paul Phillips
@@ -50,14 +59,17 @@ import Path._
*/
class File(jfile: JFile)(implicit val creationCodec: Codec = null)
extends Path(jfile)
-with Streamable.Chars
-{
+with Streamable.Chars {
def withCodec(codec: Codec): File = new File(jfile)(codec)
+ override def addExtension(ext: String): File = super.addExtension(ext).toFile
+ override def toAbsolute: File = if (isAbsolute) this else super.toAbsolute.toFile
override def toDirectory: Directory = new Directory(jfile)
override def toFile: File = this
-
+ override def normalize: File = super.normalize.toFile
override def isValid = jfile.isFile() || !jfile.exists()
override def length = super[Path].length
+ override def walkFilter(cond: Path => Boolean): Iterator[Path] =
+ if (cond(this)) Iterator.single(this) else Iterator.empty
/** Obtains an InputStream. */
def inputStream() = new FileInputStream(jfile)
@@ -65,6 +77,7 @@ with Streamable.Chars
/** Obtains a OutputStream. */
def outputStream(append: Boolean = false) = new FileOutputStream(jfile, append)
def bufferedOutput(append: Boolean = false) = new BufferedOutputStream(outputStream(append))
+ def printStream(append: Boolean = false) = new PrintStream(bufferedOutput(append))
/** Obtains an OutputStreamWriter wrapped around a FileOutputStream.
* This should behave like a less broken version of java.io.FileWriter,
@@ -78,15 +91,21 @@ with Streamable.Chars
def bufferedWriter(append: Boolean = false, codec: Codec = getCodec()) =
new BufferedWriter(writer(append, codec))
- /** Writes all the Strings in the given iterator to the file. */
- def writeAll(xs: Traversable[String], append: Boolean = false, codec: Codec = getCodec()): Unit = {
- val out = bufferedWriter(append, codec)
- try xs foreach (out write _)
+ /** Creates a new file and writes all the Strings to it. */
+ def writeAll(strings: String*): Unit = {
+ val out = bufferedWriter()
+ try strings foreach (out write _)
finally out close
}
- def copyFile(destPath: Path, preserveFileDate: Boolean = false) = {
- val FIFTY_MB = 1024 * 1024 * 50
+ def appendAll(strings: String*): Unit = {
+ val out = bufferedWriter(append = true)
+ try strings foreach (out write _)
+ finally out close
+ }
+
+ def copyTo(destPath: Path, preserveFileDate: Boolean = false): Boolean = {
+ val CHUNK = 1024 * 1024 * 16 // 16 MB
val dest = destPath.toFile
if (!isValid) fail("Source %s is not a valid file." format name)
if (this.normalize == dest.normalize) fail("Source and destination are the same.")
@@ -103,7 +122,7 @@ with Streamable.Chars
val size = in.size()
var pos, count = 0L
while (pos < size) {
- count = (size - pos) min FIFTY_MB
+ count = (size - pos) min CHUNK
pos += out.transferFrom(in, pos, count)
}
}
@@ -115,8 +134,6 @@ with Streamable.Chars
if (preserveFileDate)
dest.lastModified = this.lastModified
- ()
+ true
}
-
- override def toString() = "File(%s)".format(path)
}
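
The reworked copyTo above streams through the FileChannel in bounded chunks rather than one 50 MB transfer. A self-contained sketch of the same loop using only java.io and java.nio (names such as chunkedCopy are illustrative, not part of the library):

    import java.io.{ File => JFile, FileInputStream, FileOutputStream }

    object ChunkedCopySketch {
      private val Chunk = 1024 * 1024 * 16  // 16 MB per transferFrom call, as in copyTo

      def chunkedCopy(src: JFile, dst: JFile): Unit = {
        val in  = new FileInputStream(src).getChannel
        val out = new FileOutputStream(dst).getChannel
        try {
          val size = in.size()
          var pos  = 0L
          // transferFrom may move fewer bytes than requested, so advance by its result.
          while (pos < size)
            pos += out.transferFrom(in, pos, math.min(Chunk, size - pos))
        }
        finally { in.close(); out.close() }
      }
    }
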
diff --git a/src/compiler/scala/tools/nsc/io/Path.scala b/src/compiler/scala/tools/nsc/io/Path.scala
index de1d129e00..6b88f3cfca 100644
--- a/src/compiler/scala/tools/nsc/io/Path.scala
+++ b/src/compiler/scala/tools/nsc/io/Path.scala
@@ -7,7 +7,7 @@ package io
import java.io.{
FileInputStream, FileOutputStream, BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter,
- BufferedInputStream, BufferedOutputStream, File => JFile }
+ BufferedInputStream, BufferedOutputStream, RandomAccessFile, File => JFile }
import java.net.{ URI, URL }
import collection.{ Seq, Traversable }
import PartialFunction._
@@ -30,6 +30,16 @@ import scala.util.Random.nextASCIIString
object Path
{
+ private val ZipMagicNumber = List[Byte](80, 75, 3, 4)
+
+ /** If examineFile is true, it will look at the first four bytes of the file
+ * and see if the magic number indicates it may be a jar or zip.
+ */
+ private def magicNumberIsZip(f: Path) = f.isFile && (f.toFile.bytes().take(4).toList == ZipMagicNumber)
+ def isJarOrZip(f: Path): Boolean = isJarOrZip(f, false)
+ def isJarOrZip(f: Path, examineFile: Boolean): Boolean =
+ f.hasExtension("zip", "jar") || (examineFile && magicNumberIsZip(f))
+
// not certain these won't be problematic, but looks good so far
implicit def string2path(s: String): Path = apply(s)
implicit def jfile2path(jfile: JFile): Path = apply(jfile)
@@ -47,8 +57,14 @@ object Path
// true
// }
+ def onlyDirs(xs: Iterator[Path]): Iterator[Directory] = xs filter (_.isDirectory) map (_.toDirectory)
+ def onlyDirs(xs: List[Path]): List[Directory] = xs filter (_.isDirectory) map (_.toDirectory)
+ def onlyFiles(xs: Iterator[Path]): Iterator[File] = xs filter (_.isFile) map (_.toFile)
+ def onlyFiles(xs: List[Path]): List[File] = xs filter (_.isFile) map (_.toFile)
+
def roots: List[Path] = JFile.listRoots().toList map Path.apply
+ def apply(segments: Seq[String]): Path = apply(segments mkString JFile.separator)
def apply(path: String): Path = apply(new JFile(path))
def apply(jfile: JFile): Path =
if (jfile.isFile) new File(jfile)
@@ -63,8 +79,7 @@ import Path._
/** The Path constructor is private so we can enforce some
* semantics regarding how a Path might relate to the world.
*/
-class Path private[io] (val jfile: JFile)
-{
+class Path private[io] (val jfile: JFile) {
val separator = JFile.separatorChar
val separatorStr = JFile.separator
@@ -80,6 +95,10 @@ class Path private[io] (val jfile: JFile)
def toAbsolute: Path = if (isAbsolute) this else Path(jfile.getAbsolutePath())
def toURI: URI = jfile.toURI()
def toURL: URL = toURI.toURL()
+ /** If this path is absolute, returns it: otherwise, returns an absolute
+ * path made up of root / this.
+ */
+ def toAbsoluteWithRoot(root: Path) = if (isAbsolute) this else root.toAbsolute / this
/** Creates a new Path with the specified path appended. Assumes
* the type of the new component implies the type of the result.
@@ -88,10 +107,27 @@ class Path private[io] (val jfile: JFile)
def /(child: Directory): Directory = /(child: Path).toDirectory
def /(child: File): File = /(child: Path).toFile
+ /** If this path is a container, recursively iterate over its contents.
+ * The supplied condition is a filter which is applied to each element,
+ * with that branch of the tree being pruned when it is false. So if
+ * the condition is false for some subdirectory, nothing under that
+ * directory will be in the Iterator; but otherwise each file and
+ * subdirectory underneath it will appear.
+ */
+ def walkFilter(cond: Path => Boolean): Iterator[Path] =
+ if (isFile) toFile walkFilter cond
+ else if (isDirectory) toDirectory walkFilter cond
+ else Iterator.empty
+
+ /** Equivalent to walkFilter(_ => true).
+ */
+ def walk: Iterator[Path] = walkFilter(_ => true)
+
// identity
def name: String = jfile.getName()
def path: String = jfile.getPath()
def normalize: Path = Path(jfile.getCanonicalPath())
+ def isRootPath: Boolean = roots exists (_ isSame this)
def resolve(other: Path) = if (other.isAbsolute || isEmpty) other else /(other)
def relativize(other: Path) = {
@@ -113,19 +149,21 @@ class Path private[io] (val jfile: JFile)
/**
* @return The path of the parent directory, or root if path is already root
*/
- def parent: Path = {
- val p = path match {
- case "" | "." => ".."
- case _ if path endsWith ".." => path + separator + ".." // the only solution
- case _ => jfile.getParent match {
- case null if isAbsolute => path // it should be a root. BTW, don't need to worry about relative pathed root
- case null => "." // a file ot dir under pwd
- case x => x
- }
- }
- new Directory(new JFile(p))
+ def parent: Directory = path match {
+ case "" | "." => Directory("..")
+ case _ =>
+ // the only solution <-- a comment which could have used elaboration
+ if (segments.nonEmpty && segments.last == "..")
+ (path / "..").toDirectory
+ else jfile.getParent match {
+ case null =>
+ if (isAbsolute) toDirectory // it should be a root. BTW, don't need to worry about relative pathed root
+ else Directory(".") // a dir under pwd
+ case x =>
+ Directory(x)
+ }
}
- def parents: List[Path] = {
+ def parents: List[Directory] = {
val p = parent
if (p isSame this) Nil else p :: p.parents
}
@@ -134,6 +172,21 @@ class Path private[io] (val jfile: JFile)
case -1 => ""
case idx => name drop (idx + 1)
}
+ // compares against extensions in a CASE INSENSITIVE way.
+ def hasExtension(ext: String, exts: String*) = {
+ val xs = (ext +: exts) map (_.toLowerCase)
+ xs contains extension.toLowerCase
+ }
+ // returns the filename without the extension.
+ def stripExtension: String = name stripSuffix ("." + extension)
+ // returns the Path with the extension.
+ def addExtension(ext: String): Path = Path(path + "." + ext)
+ // changes the existing extension out for a new one
+ def changeExtension(ext: String): Path = Path((path stripSuffix extension) + ext)
+
+ // conditionally execute
+ def ifFile[T](f: File => T): Option[T] = if (isFile) Some(f(toFile)) else None
+ def ifDirectory[T](f: Directory => T): Option[T] = if (isDirectory) Some(f(toDirectory)) else None
// Boolean tests
def canRead = jfile.canRead()
@@ -179,12 +232,25 @@ class Path private[io] (val jfile: JFile)
// deletions
def delete() = jfile.delete()
def deleteIfExists() = if (jfile.exists()) delete() else false
+ def truncate() =
+ isFile && {
+ val raf = new RandomAccessFile(jfile, "rw")
+ raf setLength 0
+ raf.close()
+ length == 0
+ }
+
+ def touch(modTime: Long = System.currentTimeMillis) = {
+ createFile()
+ if (isFile)
+ lastModified = modTime
+ }
// todo
// def copyTo(target: Path, options ...): Boolean
// def moveTo(target: Path, options ...): Boolean
- override def toString() = "Path(%s)".format(path)
+ override def toString() = path
override def equals(other: Any) = other match {
case x: Path => path == x.path
case _ => false
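
Taken together, the new walkFilter and isJarOrZip make recursive scans short. A hedged sketch (the pruned directory name "target" is only an example):

    import scala.tools.nsc.io.Path

    object ArchiveScanSketch {
      def main(args: Array[String]): Unit = {
        val root = Path(args.headOption getOrElse ".")
        // walkFilter keeps an element, and recurses into it, only while the
        // condition holds, so returning false for a directory prunes its subtree.
        val candidates = root walkFilter (p => p.name != "target")
        // With examineFile = true the first four bytes are checked against the
        // zip magic number, catching archives with unusual extensions.
        candidates filter (p => Path.isJarOrZip(p, true)) foreach (p => println(p.path))
      }
    }
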
diff --git a/src/compiler/scala/tools/nsc/io/PlainFile.scala b/src/compiler/scala/tools/nsc/io/PlainFile.scala
index 5336c4d04d..a40d01d1f5 100644
--- a/src/compiler/scala/tools/nsc/io/PlainFile.scala
+++ b/src/compiler/scala/tools/nsc/io/PlainFile.scala
@@ -27,6 +27,8 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
assert(path ne null)
val file = givenPath.jfile
+ override def underlyingSource = Some(this)
+
private val fpath = try givenPath.normalize catch { case _: IOException => givenPath.toAbsolute }
/** Returns the name of this abstract file. */
@@ -55,8 +57,8 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
/** Returns all abstract subfiles of this abstract directory. */
def iterator: Iterator[AbstractFile] = {
- assert(isDirectory, "not a directory '%s'" format this)
- givenPath.toDirectory.list filter (_.exists) map (new PlainFile(_))
+ if (!isDirectory) Iterator.empty
+ else givenPath.toDirectory.list filter (_.exists) map (new PlainFile(_))
}
/**
diff --git a/src/compiler/scala/tools/nsc/io/Process.scala b/src/compiler/scala/tools/nsc/io/Process.scala
index 66ed123658..746c5f1a8d 100644
--- a/src/compiler/scala/tools/nsc/io/Process.scala
+++ b/src/compiler/scala/tools/nsc/io/Process.scala
@@ -6,7 +6,7 @@ package scala.tools.nsc
package io
import concurrent.ThreadRunner
-import scala.util.Properties.{ isWin, isMac }
+import scala.util.Properties.{ isWin, isMac, lineSeparator }
import scala.util.control.Exception.catching
import java.lang.{ Process => JProcess, ProcessBuilder => JProcessBuilder }
import java.io.{ IOException, InputStream, OutputStream, BufferedReader, InputStreamReader, PrintWriter, File => JFile }
@@ -34,7 +34,11 @@ import java.util.concurrent.LinkedBlockingQueue
object Process
{
- lazy val javaVmArguments = java.lang.management.ManagementFactory.getRuntimeMXBean().getInputArguments()
+ def javaVmArguments: List[String] = {
+ import collection.JavaConversions._
+
+ java.lang.management.ManagementFactory.getRuntimeMXBean().getInputArguments().toList
+ }
lazy val runtime = Runtime.getRuntime()
class Pipe[T](xs: Seq[T], stringify: T => String) {
@@ -42,7 +46,6 @@ object Process
val p = Process(cmd)
xs foreach (x => p.stdin println stringify(x))
p.stdin.close()
- p.stdin.flush()
p.stdout.toList
}
}
@@ -64,8 +67,7 @@ object Process
}
}
- private[Process] class ProcessBuilder(val pb: JProcessBuilder)
- {
+ private[Process] class ProcessBuilder(val pb: JProcessBuilder) {
def this(cmd: String*) = this(new JProcessBuilder(cmd: _*))
def start() = new Process(() => pb.start())
@@ -82,7 +84,7 @@ object Process
this
}
- def withCwd(cwd: File): this.type = {
+ def withCwd(cwd: Path): this.type = {
if (cwd != null)
pb directory cwd.jfile
@@ -109,10 +111,10 @@ object Process
def apply(
command: String,
env: Map[String, String] = null,
- cwd: File = null,
+ cwd: Path = null,
redirect: Boolean = false
): Process =
- exec(shell(command), env, cwd)
+ exec(shell(command), env, cwd, redirect)
/** Executes the given command line.
*
@@ -122,15 +124,14 @@ object Process
def exec(
command: Seq[String],
env: Map[String, String] = null,
- cwd: File = null,
+ cwd: Path = null,
redirect: Boolean = false
): Process =
- new ProcessBuilder(command: _*) withEnv env withCwd cwd start
+ new ProcessBuilder(command: _*) withEnv env withCwd cwd withRedirectedErrorStream redirect start
}
import Process._
-class Process(processCreator: () => JProcess) extends Iterable[String]
-{
+class Process(processCreator: () => JProcess) extends Iterable[String] {
lazy val process = processCreator()
def exitValue(): Option[Int] =
@@ -140,6 +141,7 @@ class Process(processCreator: () => JProcess) extends Iterable[String]
def destroy() = process.destroy()
def rerun() = new Process(processCreator)
+ def slurp() = _out.slurp()
def stdout = iterator
def iterator = _out.iterator
def stderr = _err.iterator
@@ -149,6 +151,11 @@ class Process(processCreator: () => JProcess) extends Iterable[String]
private val queue = new LinkedBlockingQueue[String]
private val reader = new BufferedReader(new InputStreamReader(in))
+ def slurp(): String = {
+ join()
+ queue.toArray map (_ + lineSeparator) mkString
+ }
+
def iterator = {
join() // make sure this thread is complete
new Iterator[String] {
@@ -160,6 +167,7 @@ class Process(processCreator: () => JProcess) extends Iterable[String]
override def run() {
reader.readLine match {
case null =>
+ in.close()
case x =>
queue put x
run()
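
A usage sketch for the Process changes above: the redirect flag is now actually forwarded to exec, and slurp() gathers the complete output in one String. The echo command is only an example.

    import scala.tools.nsc.io.Process

    object ProcessSketch {
      def main(args: Array[String]): Unit = {
        // Merge stderr into stdout and read everything as a single String.
        val p = Process("echo hello", redirect = true)
        print(p.slurp())
        // javaVmArguments is now a def returning a plain List[String].
        Process.javaVmArguments foreach println
      }
    }
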
diff --git a/src/compiler/scala/tools/nsc/io/Socket.scala b/src/compiler/scala/tools/nsc/io/Socket.scala
new file mode 100644
index 0000000000..e883c71b8e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/io/Socket.scala
@@ -0,0 +1,46 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package io
+
+import java.io.{ IOException, InputStreamReader, BufferedReader, PrintWriter }
+import java.net.{ URL, MalformedURLException }
+import java.net.{ InetAddress, Socket => JSocket }
+import scala.util.control.Exception._
+
+/** A skeletal only-as-much-as-I-need Socket wrapper.
+ */
+object Socket
+{
+ private val socketExceptions = List(classOf[IOException], classOf[SecurityException])
+
+ class SocketBox(f: () => Socket) {
+ def either: Either[Throwable, Socket] = catching(socketExceptions: _*) either f()
+ def opt: Option[Socket] = catching(socketExceptions: _*) opt f()
+ }
+
+ def apply(host: InetAddress, port: Int) = new SocketBox(() => new Socket(new JSocket(host, port)))
+ def apply(host: String, port: Int) = new SocketBox(() => new Socket(new JSocket(host, port)))
+}
+
+class Socket(jsocket: JSocket) {
+ def getOutputStream() = jsocket.getOutputStream()
+ def getInputStream() = jsocket.getInputStream()
+ def getPort() = jsocket.getPort()
+ def close() = jsocket.close()
+
+ /** Creates a BufferedReader and a PrintWriter around the socket streams and applies the closure, automatically closing both on completion.
+ */
+ def applyReaderAndWriter[T](f: (BufferedReader, PrintWriter) => T): T = {
+ val out = new PrintWriter(getOutputStream(), true)
+ val in = new BufferedReader(new InputStreamReader(getInputStream()))
+ try f(in, out)
+ finally {
+ in.close()
+ out.close()
+ }
+ }
+} \ No newline at end of file
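
A sketch of how the new Socket wrapper is intended to be used: connection failures come back as an Either rather than a thrown exception, and applyReaderAndWriter takes care of closing both streams. Host and port here are placeholders.

    import scala.tools.nsc.io.Socket

    object SocketSketch {
      def main(args: Array[String]): Unit =
        Socket("localhost", 9999).either match {
          case Left(err) =>
            println("could not connect: " + err)
          case Right(sock) =>
            val reply = sock applyReaderAndWriter { (in, out) =>
              out println "ping"   // the PrintWriter is created with autoflush enabled
              in.readLine()
            }
            println("server said: " + reply)
        }
    }
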
diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala
index bb5e662ab7..ccd946d0b6 100644
--- a/src/compiler/scala/tools/nsc/io/SourceReader.scala
+++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala
@@ -65,7 +65,7 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) {
case p:PlainFile =>
read(p.file) // bq: (!!!)
case z:ZipArchive#FileEntry =>
- val c = Channels.newChannel(z.getArchive.getInputStream(z.entry))
+ val c = Channels.newChannel(z.archive.getInputStream(z.entry))
read(c)
case _ =>
val b = ByteBuffer.wrap(file.toByteArray)
diff --git a/src/compiler/scala/tools/nsc/io/Streamable.scala b/src/compiler/scala/tools/nsc/io/Streamable.scala
index 4dc0745534..ff4520e3ca 100644
--- a/src/compiler/scala/tools/nsc/io/Streamable.scala
+++ b/src/compiler/scala/tools/nsc/io/Streamable.scala
@@ -101,6 +101,14 @@ object Streamable
*/
def bufferedReader(codec: Codec = getCodec()) = new BufferedReader(reader(codec))
+ /** Creates a BufferedReader and applies the closure, automatically closing it on completion.
+ */
+ def applyReader[T](f: BufferedReader => T): T = {
+ val in = bufferedReader()
+ try f(in)
+ finally in.close()
+ }
+
/** Convenience function to import entire file into a String.
*/
def slurp(codec: Codec = getCodec()) = chars(codec).mkString
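
The new applyReader is a loan pattern: the BufferedReader is created, handed to the closure and closed even if the closure throws. A sketch assuming applyReader is reachable through File via Streamable.Chars, as in the hunk above; the file name is just an example.

    import scala.tools.nsc.io.{ File, Path }

    object ApplyReaderSketch {
      def main(args: Array[String]): Unit = {
        val f = File(Path("notes.txt"))
        // The reader is closed by applyReader whether or not readLine throws.
        val firstLine = f applyReader (_.readLine())
        println(Option(firstLine) getOrElse "<empty file>")
      }
    }
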
diff --git a/src/compiler/scala/tools/nsc/io/VirtualDirectory.scala b/src/compiler/scala/tools/nsc/io/VirtualDirectory.scala
index 5675b264ac..b4b1eca439 100644
--- a/src/compiler/scala/tools/nsc/io/VirtualDirectory.scala
+++ b/src/compiler/scala/tools/nsc/io/VirtualDirectory.scala
@@ -31,26 +31,21 @@ extends AbstractFile {
override def output = error("directories cannot be written")
/** Does this abstract file denote an existing file? */
- def create {
- throw new UnsupportedOperationException
- }
+ def create { unsupported }
/** Delete the underlying file or directory (recursively). */
- def delete {
- throw new UnsupportedOperationException
- }
+ def delete { unsupported }
/** Returns an abstract file with the given name. It does not
* check that it exists.
*/
- def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile =
- throw new UnsupportedOperationException()
+ def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported
private val files = mutable.Map.empty[String, AbstractFile]
// the toList is so that the directory may continue to be
// modified while its elements are iterated
- def iterator = files.valuesIterator.toList.iterator
+ def iterator = files.values.toList.iterator
override def lookupName(name: String, directory: Boolean): AbstractFile =
files get name filter (_.isDirectory == directory) orNull
diff --git a/src/compiler/scala/tools/nsc/io/VirtualFile.scala b/src/compiler/scala/tools/nsc/io/VirtualFile.scala
index edbcde1ff3..6e3ad7ead6 100644
--- a/src/compiler/scala/tools/nsc/io/VirtualFile.scala
+++ b/src/compiler/scala/tools/nsc/io/VirtualFile.scala
@@ -62,7 +62,7 @@ class VirtualFile(val name: String, _path: String) extends AbstractFile
}
}
- def container : AbstractFile = throw new Error("not supported")
+ def container: AbstractFile = unsupported
/** Is this abstract file a directory? */
def isDirectory: Boolean = false
@@ -77,14 +77,10 @@ class VirtualFile(val name: String, _path: String) extends AbstractFile
}
/** Does this abstract file denote an existing file? */
- def create {
- throw new UnsupportedOperationException
- }
+ def create { unsupported }
/** Delete the underlying file or directory (recursively). */
- def delete {
- throw new UnsupportedOperationException
- }
+ def delete { unsupported }
/**
* Returns the abstract file in this abstract directory with the
@@ -104,8 +100,7 @@ class VirtualFile(val name: String, _path: String) extends AbstractFile
/** Returns an abstract file with the given name. It does not
* check that it exists.
*/
- def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile =
- throw new UnsupportedOperationException()
+ def lookupNameUnchecked(name: String, directory: Boolean) = unsupported
//########################################################################
}
diff --git a/src/compiler/scala/tools/nsc/io/ZipArchive.scala b/src/compiler/scala/tools/nsc/io/ZipArchive.scala
index 62f1491fdf..4ee3a29671 100644
--- a/src/compiler/scala/tools/nsc/io/ZipArchive.scala
+++ b/src/compiler/scala/tools/nsc/io/ZipArchive.scala
@@ -14,7 +14,6 @@ import java.io.{ File => JFile, IOException, InputStream, BufferedInputStream, B
import java.util.zip.{ ZipEntry, ZipFile, ZipInputStream }
import PartialFunction._
-import scala.collection.Traversable
import scala.collection.mutable.{ Map, HashMap }
import scala.collection.JavaConversions.asIterator
@@ -157,8 +156,7 @@ private[io] trait ZipContainer extends AbstractFile
/** Returns an abstract file with the given name. It does not
* check that it exists.
*/
- override def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile =
- throw new UnsupportedOperationException()
+ override def lookupNameUnchecked(name: String, directory: Boolean) = unsupported
/** Returns all abstract subfiles of this abstract directory. */
override def iterator: Iterator[AbstractFile] = root.iterator
@@ -207,9 +205,9 @@ final class ZipArchive(file: File, val archive: ZipFile) extends PlainFile(file)
path: String
) extends VirtualFile(name, path)
{
- final override def path = "%s(%s)".format(self, pathInArchive)
- final def getArchive = self.archive
- def pathInArchive = super.path
+ override def underlyingSource = Some(self)
+ final override def path = "%s(%s)".format(self, super.path)
+ final def archive = self.archive
override def hashCode = super.hashCode + container.hashCode
override def equals(that : Any) =
@@ -234,15 +232,13 @@ final class ZipArchive(file: File, val archive: ZipFile) extends PlainFile(file)
val entry: ZipEntry
) extends Entry(container, name, path) with FileEntryInterface
{
- def archive = self.archive
override def input = archive getInputStream entry
}
private def zipTraversableFromZipFile(z: ZipFile): ZipTrav =
- new Traversable[ZipEntry] {
- def zis: ZipInputStream = null // not valid for this type
- val itStream = asIterator(z.entries()).toStream
- def foreach[U](f: ZipEntry => U) = itStream foreach f
+ new Iterable[ZipEntry] {
+ def zis: ZipInputStream = null // not valid for this type
+ def iterator = asIterator(z.entries())
}
}
@@ -275,13 +271,14 @@ final class URLZipArchive(url: URL) extends AbstractFile with ZipContainer
/** Methods we don't support but have to implement because of the design */
def file: JFile = null
- def create: Unit = throw new UnsupportedOperationException
- def delete: Unit = throw new UnsupportedOperationException
- def output = throw new Error("unsupported")
- def container = throw new Error("unsupported")
+ def create: Unit = unsupported
+ def delete: Unit = unsupported
+ def output = unsupported
+ def container = unsupported
abstract class Entry(name: String, path: String) extends VirtualFile(name, path) {
final override def path = "%s(%s)".format(URLZipArchive.this, super.path)
+ override def container = URLZipArchive.this
}
final class DirEntry(name: String, path: String) extends Entry(name, path) with DirEntryInterface {
def source = input
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 5838fa47c1..92dbc97965 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -9,7 +9,7 @@
package scala.tools.nsc
package javac
-import scala.tools.nsc.util.{Position, OffsetPosition, NoPosition, BatchSourceFile}
+import scala.tools.nsc.util.{OffsetPosition, BatchSourceFile}
import scala.collection.mutable.ListBuffer
import symtab.Flags
import JavaTokens._
@@ -429,7 +429,7 @@ trait JavaParsers extends JavaScanners {
return Modifiers(flags, privateWithin)
}
}
- throw new Error("should not be here")
+ abort("should not be here")
}
def typeParams(): List[TypeDef] =
diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
index 113f1265e5..6a698a0708 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package javac
import scala.tools.nsc.util._
-import Chars.{LF, FF, CR, SU}
+import Chars._
import JavaTokens._
import scala.annotation.switch
@@ -685,24 +685,6 @@ trait JavaScanners {
// Identifiers ---------------------------------------------------------------
- def isIdentStart(c: Char): Boolean = (
- ('A' <= c && c <= 'Z') ||
- ('a' <= c && c <= 'a') ||
- (c == '_') || (c == '$') ||
- Character.isUnicodeIdentifierStart(c)
- )
-
- def isIdentPart(c: Char) = (
- isIdentStart(c) ||
- ('0' <= c && c <= '9') ||
- Character.isUnicodeIdentifierPart(c)
- )
-
- def isSpecial(c: Char) = {
- val chtp = Character.getType(c)
- chtp == Character.MATH_SYMBOL.toInt || chtp == Character.OTHER_SYMBOL.toInt
- }
-
private def getIdentRest {
while (true) {
(in.ch: @switch) match {
@@ -754,13 +736,13 @@ trait JavaScanners {
in.next
if ('0' <= in.ch && in.ch <= '7') {
val leadch: Char = in.ch
- var oct: Int = in.digit2int(in.ch, 8)
+ var oct: Int = digit2int(in.ch, 8)
in.next
if ('0' <= in.ch && in.ch <= '7') {
- oct = oct * 8 + in.digit2int(in.ch, 8)
+ oct = oct * 8 + digit2int(in.ch, 8)
in.next
if (leadch <= '3' && '0' <= in.ch && in.ch <= '7') {
- oct = oct * 8 + in.digit2int(in.ch, 8)
+ oct = oct * 8 + digit2int(in.ch, 8)
in.next
}
}
@@ -840,7 +822,7 @@ trait JavaScanners {
var i = 0
val len = name.length
while (i < len) {
- val d = in.digit2int(name(i), base)
+ val d = digit2int(name(i), base)
if (d < 0) {
syntaxError("malformed integer number")
return 0
@@ -879,7 +861,7 @@ trait JavaScanners {
/** read a number into name and set base
*/
protected def getNumber {
- while (in.digit2int(in.ch, if (base < 10) 10 else base) >= 0) {
+ while (digit2int(in.ch, if (base < 10) 10 else base) >= 0) {
putChar(in.ch)
in.next
}
@@ -894,7 +876,7 @@ trait JavaScanners {
in.next
return getFraction
case _ =>
- if (!isIdentStart(lookahead.ch)) {
+ if (!isIdentifierStart(lookahead.ch)) {
putChar(in.ch)
in.next
return getFraction
diff --git a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
index 3e0637d374..c91ecc95c2 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
@@ -7,21 +7,8 @@
package scala.tools.nsc
package javac
-object JavaTokens {
+object JavaTokens extends ast.parser.Tokens {
- /** special tokens */
- final val EMPTY = -3
- final val UNDEF = -2
- final val ERROR = -1
- final val EOF = 0
-
- /** literals */
- final val CHARLIT = 1
- final val INTLIT = 2
- final val LONGLIT = 3
- final val FLOATLIT = 4
- final val DOUBLELIT = 5
- final val STRINGLIT = 6
def isLiteral(code : Int) =
code >= CHARLIT && code <= STRINGLIT
@@ -139,20 +126,4 @@ object JavaTokens {
final val RBRACKET = 118
final val LBRACE = 119
final val RBRACE = 120
-
- def isBrace(code : Int) =
- code >= LPAREN && code <= RBRACE
- def isOpenBrace(code : Int) = isBrace(code) && (code % 2 == 0)
- def isCloseBrace(code : Int) = isBrace(code) && (code % 2 == 1)
-
- def isSpace(at : Char) = at match {
- case ' ' | '\t' => true
- case _ => false
- }
- import util.Chars._
-
- def isNewLine(at : Char) = at match {
- case CR | LF | FF => true
- case _ => false
- }
}
diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
index 1722500066..2f50435db6 100644
--- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
+++ b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
@@ -13,9 +13,7 @@ import java.io.{ StringWriter, PrintWriter }
/** Ancillary bits of ParallelMatching which are better off
* out of the way.
*/
-trait MatchSupport extends ast.TreeDSL
-{
- self: ParallelMatching =>
+trait MatchSupport extends ast.TreeDSL { self: ParallelMatching =>
import global.{ typer => _, _ }
import CODE._
@@ -24,28 +22,15 @@ trait MatchSupport extends ast.TreeDSL
private final def trace = settings.Ypmatdebug.value
def impossible: Nothing = abort("this never happens")
- def abort(msg: String): Nothing = Predef.error(msg)
object Types {
import definitions._
implicit def enrichType(x: Type): RichType = new RichType(x)
- // see bug1434.scala for an illustration of why "x <:< y" is insufficient.
- // this code is definitely inadequate at best. Inherited comment:
- //
- // an approximation of _tp1 <:< tp2 that ignores _ types. this code is wrong,
- // ideally there is a better way to do it, and ideally defined in Types.scala
- private[matching] def matches(arg1: Type, arg2: Type) = {
- val List(t1, t2) = List(arg1, arg2) map decodedEqualsType
- def eqSymbols = t1.typeSymbol eq t2.typeSymbol
- // note: writing this as "t1.baseTypeSeq exists (_ =:= t2)" does not lead to 1434 passing.
- def isSubtype = t1.baseTypeSeq exists (_.typeSymbol eq t2.typeSymbol)
-
- (t1 <:< t2) || ((t1, t2) match {
- case (_: TypeRef, _: TypeRef) => !t1.isArray && (t1.prefix =:= t2.prefix) && (eqSymbols || isSubtype)
- case _ => false
- })
- }
+ // A subtype test which creates fresh existentials for type
+ // parameters on the right hand side.
+ private[matching] def matches(arg1: Type, arg2: Type) =
+ decodedEqualsType(arg1) matchesPattern decodedEqualsType(arg2)
class RichType(undecodedTpe: Type) {
def tpe = decodedEqualsType(undecodedTpe)
@@ -102,26 +87,13 @@ trait MatchSupport extends ast.TreeDSL
pp(x match {
case s: String => return clean(s)
- case x: Tree => treeToCompactString(x)
+ case x: Tree => asCompactString(x)
case xs: List[_] => pplist(xs map pp)
case x: Tuple2[_,_] => "%s -> %s".format(pp(x._1), pp(x._2))
case x => x.toString
})
}
- object compactTreePrinter extends CompactTreePrinter
-
- // def treeChildrenString(t: Tree): String =
- // nodeToString(t)
-
- def treeToCompactString(t: Tree): String = {
- val buffer = new StringWriter()
- val printer = compactTreePrinter.create(new PrintWriter(buffer))
- printer.print(t)
- printer.flush()
- buffer.toString
- }
-
def ifDebug(body: => Unit): Unit = { if (settings.debug.value) body }
def DBG(msg: => String): Unit = { ifDebug(println(msg)) }
@@ -166,121 +138,4 @@ trait MatchSupport extends ast.TreeDSL
*/
def extractIndex[T](xs: List[T], n: Int): (T, List[T]) =
(xs(n), dropIndex(xs, n))
-
- /** A tree printer which is stingier about vertical whitespace and unnecessary
- * punctuation than the standard one.
- */
- class CompactTreePrinter extends {
- val trees: global.type = global
- } with TreePrinters {
- import trees._
-
- override def create(writer: PrintWriter): TreePrinter = new TreePrinter(writer) {
- // drill down through Blocks and pull out the real statements.
- def allStatements(t: Tree): List[Tree] = t match {
- case Block(stmts, expr) => (stmts flatMap allStatements) ::: List(expr)
- case _ => List(t)
- }
-
- def printLogicalOr(t1: (Tree, Boolean), t2: (Tree, Boolean)) =
- printLogicalOp(t1, t2, "||")
-
- def printLogicalAnd(t1: (Tree, Boolean), t2: (Tree, Boolean)) =
- printLogicalOp(t1, t2, "&&")
-
- def printLogicalOp(t1: (Tree, Boolean), t2: (Tree, Boolean), op: String) = {
- def maybenot(tvalue: Boolean) = if (tvalue) "" else "!"
-
- printRow(List(t1._1, t2._1),
- " %s(" format maybenot(t1._2),
- ") %s %s(".format(op, maybenot(t2._2)),
- ")"
- )
- }
-
- override def printRaw(tree: Tree): Unit = {
- // routing supercalls through this for debugging ease
- def s() = super.printRaw(tree)
-
- tree match {
- // labels used for jumps - does not map to valid scala code
- case LabelDef(name, params, rhs) =>
- print("labeldef %s(%s) = ".format(name, params mkString ","))
- printRaw(rhs)
-
- // target.method(arg) ==> target method arg
- case Apply(Select(target, method), List(arg)) =>
- (target, arg) match {
- case (_: Ident, _: Literal | _: Ident) =>
- printRaw(target)
- print(" %s " format symName(tree, method))
- printRaw(arg)
- case _ => s()
- }
-
- // target.unary_! ==> !target
- case Select(qualifier, name) =>
- val n = symName(tree, name)
- if (n startsWith "unary_") {
- print(n drop 6)
- print(qualifier)
- }
- else s()
-
- // target.toString() ==> target.toString
- case Apply(fn, Nil) => printRaw(fn)
-
- // if a Block only continues one actual statement, just print it.
- case Block(stats, expr) =>
- allStatements(tree) match {
- case List(x) => printRow(List(x), "", ";", "")
- case _ => s()
- }
-
- // We get a lot of this stuff
- case If( IsTrue(), x, _) => printRaw(x)
- case If(IsFalse(), _, x) => printRaw(x)
-
- case If(cond, IsTrue(), elsep) =>
- printLogicalOr(cond -> true, elsep -> true)
-
- case If(cond, IsFalse(), elsep) =>
- printLogicalAnd(cond -> false, elsep -> true)
-
- case If(cond, thenp, IsTrue()) =>
- printLogicalOr(cond -> false, thenp -> true)
-
- case If(cond, thenp, IsFalse()) =>
- printLogicalAnd(cond -> true, thenp -> true)
-
- // If thenp or elsep has only one statement, it doesn't need more than one line.
- case If(cond, thenp, elsep) =>
- printRow(List(cond), "if (", "", ") ")
-
- def ifIndented(x: Tree) = {
- indent ; println ; printRaw(x) ; undent
- }
-
- indent ; println ;
- allStatements(thenp) match {
- case List(x: If) => ifIndented(x)
- case List(x) => printRaw(x)
- case _ => printRaw(thenp)
- }
- undent ; println ;
- val elseStmts = allStatements(elsep)
- if (!elseStmts.isEmpty) {
- print("else")
- indent ; println
- elseStmts match {
- case List(x) => printRaw(x)
- case xs => printRaw(elsep)
- }
- undent ; println
- }
- case _ => s()
- }
- }
- }
- }
}
diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala
index 61ca97ea24..de3204318f 100644
--- a/src/compiler/scala/tools/nsc/matching/Matrix.scala
+++ b/src/compiler/scala/tools/nsc/matching/Matrix.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package matching
import transform.ExplicitOuter
-import util.Position
import symtab.Flags
trait Matrix extends MatrixAdditions {
@@ -50,7 +49,7 @@ trait Matrix extends MatrixAdditions {
This is the real work-horse of the algorithm. There is some column whose top-most pattern is a
constructor. (For simplicity, it is depicted above as the left-most column, but any column will do.)
The goal is to build a test state with the variable v and some outgoing arcs (one for each construc-
- tor and possibly a default arc). For each constructor c in the selected column, its arc is defined as
+ tor and possibly a default arc). For each constructor in the selected column, its arc is defined as
follows:
Let {i1,...,ij} be the rows-indices of the patterns in the column that match c. Since the pat-
@@ -157,7 +156,7 @@ trait Matrix extends MatrixAdditions {
def tpe = valsym.tpe
lazy val ident = ID(lhs)
- lazy val valDef = tracing("typedVal", typer typedValDef (VAL(lhs) === rhs))
+ lazy val valDef = tracing("typedVal", typer typedValDef (VAL(lhs) === rhs) setPos lhs.pos)
override def toString() = "%s: %s = %s".format(lhs, lhs.info, rhs)
}
diff --git a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala b/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
index 4f13d4fd99..d3dddbfaaf 100644
--- a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
+++ b/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
@@ -20,22 +20,7 @@ trait MatrixAdditions extends ast.TreeDSL
import symtab.Flags
import CODE._
import Debug._
-
- // Extractors which can spot pure true/false expressions
- // even through the haze of braces
- abstract class SeeThroughBlocks[T] {
- protected def unapplyImpl(x: Tree): T
- def unapply(x: Tree): T = x match {
- case Block(Nil, expr) => unapply(expr)
- case _ => unapplyImpl(x)
- }
- }
- object IsTrue extends SeeThroughBlocks[Boolean] {
- protected def unapplyImpl(x: Tree): Boolean = x equalsStructure TRUE
- }
- object IsFalse extends SeeThroughBlocks[Boolean] {
- protected def unapplyImpl(x: Tree): Boolean = x equalsStructure FALSE
- }
+ import treeInfo.{ IsTrue, IsFalse }
/** The Squeezer, responsible for all the squeezing.
*/
@@ -45,9 +30,15 @@ trait MatrixAdditions extends ast.TreeDSL
def squeezedBlockPVs(pvs: List[PatternVar], exp: Tree): Tree =
squeezedBlock(pvs map (_.valDef), exp)
+ /** Compresses multiple Blocks. */
+ def mkBlock(stats: List[Tree], expr: Tree): Tree = expr match {
+ case Block(stats1, expr1) if stats.isEmpty => mkBlock(stats1, expr1)
+ case _ => Block(stats, expr)
+ }
+
def squeezedBlock(vds: List[Tree], exp: Tree): Tree =
- if (settings_squeeze) Block(Nil, squeezedBlock1(vds, exp))
- else Block(vds, exp)
+ if (settings_squeeze) mkBlock(Nil, squeezedBlock1(vds, exp))
+ else mkBlock(vds, exp)
private def squeezedBlock1(vds: List[Tree], exp: Tree): Tree = {
class RefTraverser(sym: Symbol) extends Traverser {
@@ -169,16 +160,26 @@ trait MatrixAdditions extends ast.TreeDSL
def cmpSymbols(t1: Type, t2: Type) = t1.typeSymbol eq t2.typeSymbol
def coversSym = {
val tpe = decodedEqualsType(p.tpe)
- lazy val lmoc = sym.linkedModuleOfClass
+ lazy val lmoc = sym.companionModule
val symtpe =
if ((sym hasFlag Flags.MODULE) && (lmoc ne NoSymbol))
singleType(sym.tpe.prefix, lmoc) // e.g. None, Nil
else sym.tpe
+ /** Note to Martin should you come through this way: this
+ * logic looks way overcomplicated for the intention, but a little
+ * experimentation showed that at least most of it is serving
+ * some necessary purpose. It doesn't seem like much more than
+ * "sym.tpe matchesPattern tpe" ought to be necessary though.
+ *
+ * For the time being I tacked the matchesPattern test onto the
+ * end to address #3097.
+ */
(tpe.typeSymbol == sym) ||
(symtpe <:< tpe) ||
(symtpe.parents exists (x => cmpSymbols(x, tpe))) || // e.g. Some[Int] <: Option[&b]
- ((tpe.prefix memberType sym) <:< tpe) // outer, see combinator.lexical.Scanner
+ ((tpe.prefix memberType sym) <:< tpe) || // outer, see combinator.lexical.Scanner
+ (symtpe matchesPattern tpe)
}
cond(p.tree) {
@@ -195,18 +196,13 @@ trait MatrixAdditions extends ast.TreeDSL
private def requiresExhaustive(s: Symbol) =
(s hasFlag MUTABLE) && // indicates that have not yet checked exhaustivity
!(s hasFlag TRANS_FLAG) && // indicates @unchecked
- (s.tpe.typeSymbol hasFlag SEALED) &&
+ (s.tpe.typeSymbol.isSealed) &&
{ s resetFlag MUTABLE ; true } // side effects MUTABLE flag
- private def sealedSymsFor(s: Symbol): Set[Symbol] = {
- val kids = s.children flatMap sealedSymsFor
- if (s hasFlag ABSTRACT) kids else kids + s
- }
-
private lazy val inexhaustives: List[List[Combo]] = {
val collected =
for ((pv, i) <- tvars.zipWithIndex ; val sym = pv.lhs ; if requiresExhaustive(sym)) yield
- i -> sealedSymsFor(sym.tpe.typeSymbol)
+ i -> sym.tpe.typeSymbol.sealedDescendants
val folded =
collected.foldRight(List[List[Combo]]())((c, xs) => {
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index dda679e96e..d4a920008f 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -95,7 +95,7 @@ trait ParallelMatching extends ast.TreeDSL
def sym = pv.sym
def tpe = sym.tpe
def pos = sym.pos
- def id = ID(sym) // attributed ident
+ def id = ID(sym) setPos pos // attributed ident
def accessors = if (isCaseClass) sym.caseFieldAccessors else Nil
def accessorTypes = accessors map (x => (tpe memberType x).resultType)
@@ -149,7 +149,7 @@ trait ParallelMatching extends ast.TreeDSL
if (!scrut.isSimple) None
else {
val (_lits, others) = ps span isSwitchableConst
- val lits = _lits partialMap { case x: LiteralPattern => x }
+ val lits = _lits collect { case x: LiteralPattern => x }
condOpt(others) {
case Nil => new PatternSwitch(scrut, lits, None)
@@ -537,7 +537,10 @@ trait ParallelMatching extends ast.TreeDSL
case Pattern(LIT(null), _) if !(p =:= s) => (None, passr) // (1)
case x if isObjectTest => (passl(), None) // (2)
case Pattern(Typed(pp, _), _) if sMatchesP => (typed(pp), None) // (4)
- case Pattern(_: UnApply, _) => (passl(), passr)
+ // The next line used to be this which "fixed" 1697 but introduced
+ // numerous regressions including #3136.
+ // case Pattern(_: UnApply, _) => (passl(), passr)
+ case Pattern(_: UnApply, _) => (None, passr)
case x if !x.isDefault && sMatchesP => (subs(), None)
case x if x.isDefault || pMatchesS => (passl(), passr)
case _ => (None, passr)
@@ -692,34 +695,28 @@ trait ParallelMatching extends ast.TreeDSL
}
def createLabelBody(index: Int, pvgroup: PatternVarGroup) = {
- def args = pvgroup.syms
- def vdefs = pvgroup.valDefs
+ val args = pvgroup.syms
+ val vdefs = pvgroup.valDefs
val name = "body%" + index
require(_labelSym == null)
referenceCount += 1
if (isLabellable) {
- // val mtype = MethodType(freeVars, bodyTpe)
- val mtype = MethodType(args, bodyTpe)
+ val mtype = MethodType(freeVars, bodyTpe)
_labelSym = owner.newLabel(body.pos, name) setInfo mtype
TRACE("Creating index %d: mtype = %s".format(bx, mtype))
- if (freeVars.size != args.size)
- TRACE("We will be hosed! freeVars = %s, args = %s, vdefs = %s".format(freeVars, args, vdefs))
-
- // Labelled expression - the symbols in the array (must be Idents!)
- // are those the label takes as argument
- _label = typer typedLabelDef LabelDef(_labelSym, args, body setType bodyTpe)
- TRACE("[New label] def %s%s: %s = %s".format(name, pp(args), bodyTpe, body))
+ _label = typer typedLabelDef LabelDef(_labelSym, freeVars, body setType bodyTpe)
+ TRACE("[New label] def %s%s: %s = %s".format(name, pp(freeVars), bodyTpe, body))
}
ifLabellable(vdefs, squeezedBlock(vdefs, label))
}
def getLabelBody(pvgroup: PatternVarGroup): Tree = {
- def idents = pvgroup map (_.rhs)
- def vdefs = pvgroup.valDefs
+ val idents = pvgroup map (_.rhs)
+ val vdefs = pvgroup.valDefs
referenceCount += 1
// if (idents.size != labelParamTypes.size)
// consistencyFailure(idents, vdefs)
@@ -836,7 +833,12 @@ trait ParallelMatching extends ast.TreeDSL
// type, but if the value doesn't appear on the right hand side of the
// match that's unimportant; so we add an instance check only if there
// is a binding.
- if (isBound) eqTest AND (scrutTree IS tpe.widen)
+ if (isBound) {
+ if (settings.Xmigration28.value) {
+ cunit.warning(scrutTree.pos, "A bound pattern such as 'x @ Pattern' now matches fewer cases than the same pattern with no binding.")
+ }
+ eqTest AND (scrutTree IS tpe.widen)
+ }
else eqTest
case _ if scrutTree.tpe <:< tpe && tpe.isAnyRef => scrutTree OBJ_!= NULL
diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala
index 3158f8a6cf..a21a9c7d9f 100644
--- a/src/compiler/scala/tools/nsc/matching/Patterns.scala
+++ b/src/compiler/scala/tools/nsc/matching/Patterns.scala
@@ -7,8 +7,7 @@ package scala.tools.nsc
package matching
import symtab.Flags
-import util.NoPosition
-import scala.util.NameTransformer.decode
+import scala.reflect.NameTransformer.decode
import PartialFunction._
/** Patterns are wrappers for Trees with enhanced semantics.
@@ -131,7 +130,7 @@ trait Patterns extends ast.TreeDSL {
case class ConstructorPattern(tree: Apply) extends ApplyPattern with NamePattern {
require(fn.isType && this.isCaseClass)
def name = tpe.typeSymbol.name
- def cleanName = tpe.typeSymbol.cleanNameString
+ def cleanName = tpe.typeSymbol.decodedName
def hasPrefix = tpe.prefix.prefixString != ""
def prefixedName =
if (hasPrefix) "%s.%s".format(tpe.prefix.prefixString, cleanName)
diff --git a/src/compiler/scala/tools/nsc/matching/TransMatcher.scala b/src/compiler/scala/tools/nsc/matching/TransMatcher.scala
index b4a15733f9..be455b00b0 100644
--- a/src/compiler/scala/tools/nsc/matching/TransMatcher.scala
+++ b/src/compiler/scala/tools/nsc/matching/TransMatcher.scala
@@ -7,12 +7,10 @@
package scala.tools.nsc
package matching
-import util.Position
-import ast.{ TreePrinters, Trees }
import symtab.SymbolTable
import transform.ExplicitOuter
import java.io.{ StringWriter, PrintWriter }
-import scala.util.NameTransformer.decode
+import scala.reflect.NameTransformer.decode
import PartialFunction._
/** Translation of pattern matching
@@ -46,7 +44,7 @@ trait TransMatcher extends ast.TreeDSL {
{
import context._
- def matchError(obj: Tree) = atPos(selector.pos)(THROW(MatchErrorClass, obj))
+ def matchError(obj: Tree) = atPos(selector.pos)(MATCHERROR(obj))
def caseIsOk(c: CaseDef) = cond(c.pat) { case _: Apply | Ident(nme.WILDCARD) => true }
def rootTypes = selector.tpe.typeArgs
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
index d3b5c0a22d..f527441c68 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
@@ -131,13 +131,13 @@ object Plugin {
dirs: List[Path],
ignoring: List[String]): List[AnyClass] =
{
- val alljars = jars ::: (for {
+ val alljars = (jars ::: (for {
dir <- dirs if dir.isDirectory
- entry <- dir.toDirectory.files.toList sortWith (_.name <= _.name)
- if entry.name.toLowerCase endsWith ".jar"
+ entry <- dir.toDirectory.files.toList sortBy (_.name)
+ if entry.extension == "jar"
pdesc <- loadDescription(entry)
if !(ignoring contains pdesc.name)
- } yield entry)
+ } yield entry)).distinct
val loader = loaderFor(alljars)
alljars map (loadFrom(_, loader)) flatten
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
index 90d306614a..e800e0f904 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
@@ -30,10 +30,10 @@ trait Plugins
val dirs = (settings.pluginsDir.value split File.pathSeparator).toList map Path.apply
val classes = Plugin.loadAllFrom(jars, dirs, settings.disable.value)
- classes foreach (c => Plugin.instantiate(c, this))
-
- for (plugClass <- Plugin.loadAllFrom(jars, dirs, settings.disable.value))
- yield Plugin.instantiate(plugClass, this)
+ // Each plugin must only be instantiated once. A common pattern
+ // is to register annotation checkers during object construction, so
+ // creating multiple plugin instances will leave behind stale checkers.
+ classes map (Plugin.instantiate(_, this))
}
protected lazy val roughPluginsList: List[Plugin] = loadRoughPluginsList
diff --git a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
index a2f6b1f247..ce10f560e9 100644
--- a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
@@ -27,15 +27,19 @@ abstract class AbstractReporter extends Reporter {
def display(pos: Position, msg: String, severity: Severity): Unit
def displayPrompt: Unit
- protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean) {
+ protected def info0(pos: Position, msg: String, _severity: Severity, force: Boolean) {
+ val severity =
+ if (settings.Ywarnfatal.value && _severity == WARNING) ERROR
+ else _severity
+
severity match {
case INFO =>
if (force || settings.verbose.value) display(pos, msg, severity)
case WARNING =>
val hidden = testAndLog(pos, severity)
if (!settings.nowarnings.value) {
- if (!hidden || settings.prompt.value) display(pos, msg, severity)
- if (settings.prompt.value) displayPrompt
+ if (!hidden || settings.prompt.value) display(pos, msg, severity)
+ if (settings.prompt.value) displayPrompt
}
case ERROR =>
val hidden = testAndLog(pos, severity)
diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
index 6564cf881f..f7d380c975 100644
--- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
@@ -103,18 +103,21 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
writer.flush()
var line = reader.readLine()
if (line ne null) {
- line = line.toLowerCase()
- if ("abort" startsWith line)
- throw new Error("user abort")
- if ("resume" startsWith line) continue = false
+ line = line.toLowerCase()
+ if ("abort" startsWith line)
+ abort("user abort")
+ if ("resume" startsWith line)
+ continue = false
}
}
- } catch {
+ }
+ catch {
case ex: IOException => {
ex.printStackTrace()
- throw new Error("input read error")
+ abort("input read error")
}
}
+ private def abort(msg: String) = throw new Error(msg)
override def flush() { writer.flush() }
}
diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
index 99292f2338..bdb6c6ae6f 100644
--- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
@@ -31,7 +31,8 @@ abstract class Reporter {
}
var cancelled: Boolean = false
- def hasErrors: Boolean = ERROR.count != 0 || cancelled
+ def hasErrors: Boolean = ERROR.count > 0 || cancelled
+ def hasWarnings: Boolean = WARNING.count > 0
/** Flush all output */
def flush() { }
diff --git a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
new file mode 100644
index 0000000000..790f4cce00
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
@@ -0,0 +1,40 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package settings
+
+trait AbsScalaSettings {
+ self: AbsSettings =>
+
+ type BooleanSetting <: AbsSetting { type T = Boolean }
+ type ChoiceSetting <: AbsSetting { type T = String }
+ type DefinesSetting <: AbsSetting { type T = List[(String, String)] }
+ type IntSetting <: AbsSetting { type T = Int }
+ type MultiStringSetting <: AbsSetting { type T = List[String] }
+ type PathSetting <: AbsSetting { type T = String }
+ type PhasesSetting <: AbsSetting { type T = List[String] }
+ type StringSetting <: AbsSetting { type T = String }
+
+ type OutputDirs
+ type OutputSetting <: AbsSetting
+
+ def BooleanSetting(name: String, descr: String): BooleanSetting
+ def ChoiceSetting(name: String, descr: String, choices: List[String], default: String): ChoiceSetting
+ def DefinesSetting(): DefinesSetting
+ def IntSetting(name: String, descr: String, default: Int, range: Option[(Int, Int)], parser: String => Option[Int]): IntSetting
+ def MultiStringSetting(name: String, arg: String, descr: String): MultiStringSetting
+ def OutputSetting(outputDirs: OutputDirs, default: String): OutputSetting
+ def PathSetting(name: String, arg: String, descr: String, default: String): PathSetting
+ def PhasesSetting(name: String, descr: String): PhasesSetting
+ def StringSetting(name: String, arg: String, descr: String, default: String): StringSetting
+
+ /** **/
+ abstract class SettingGroup(val prefix: String) extends AbsSetting {
+ def name = prefix
+ def helpDescription: String = error("todo")
+ def unparse: List[String] = List(name)
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
new file mode 100644
index 0000000000..75e2c5ce11
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
@@ -0,0 +1,134 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package settings
+
+import io.AbstractFile
+
+/** A Settings abstraction boiled out of the original highly mutable Settings
+ * class with the intention of creating an ImmutableSettings which can be used
+ * interchangeably. Except of course without the mutants.
+ */
+
+trait AbsSettings {
+ type Setting <: AbsSetting // Fix to the concrete Setting type
+ type ResultOfTryToSet // List[String] in mutable, (Settings, List[String]) in immutable
+ def errorFn: String => Unit
+ protected def allSettings: collection.Set[Setting]
+
+ // settings minus internal usage settings
+ def visibleSettings = allSettings filterNot (_.isInternalOnly)
+
+ // only settings which differ from default
+ def userSetSettings = visibleSettings filterNot (_.isDefault)
+
+ // an argument list which (should) be usable to recreate the Settings
+ def recreateArgs = userSetSettings.toList flatMap (_.unparse)
+
+ // checks both name and any available abbreviations
+ def lookupSetting(cmd: String): Option[Setting] = allSettings find (_ respondsTo cmd)
+
+ // two AbsSettings objects are equal if their visible settings are equal.
+ override def hashCode() = visibleSettings.hashCode
+ override def equals(that: Any) = that match {
+ case s: AbsSettings => this.visibleSettings == s.visibleSettings
+ case _ => false
+ }
+ override def toString() = "Settings {\n%s}\n" format (userSetSettings map (" " + _ + "\n") mkString)
+ def toConciseString = userSetSettings.mkString("(", " ", ")")
+
+ def checkDependencies =
+ visibleSettings filterNot (_.isDefault) forall (setting => setting.dependencies forall {
+ case (dep, value) =>
+ (Option(dep.value) exists (_.toString == value)) || {
+ errorFn("incomplete option %s (requires %s)".format(setting.name, dep.name))
+ false
+ }
+ })
+
+ implicit lazy val SettingOrdering: Ordering[Setting] = Ordering.ordered
+
+ trait AbsSettingValue {
+ type T <: Any
+ def value: T
+ def isDefault: Boolean
+ }
+
+ trait AbsSetting extends Ordered[Setting] with AbsSettingValue {
+ def name: String
+ def helpDescription: String
+ def unparse: List[String] // A list of Strings which can recreate this setting.
+
+ /* For tools which need to populate lists of available choices */
+ def choices : List[String] = Nil
+
+ /** In mutable Settings, these return the same object with a var set.
+ * In immutable, of course they will return a new object, which means
+ * we can't use "this.type", at least not in a non-casty manner, which
+ * is unfortunate because we lose type information without it.
+ */
+ def withAbbreviation(name: String): Setting
+ def withHelpSyntax(help: String): Setting
+
+ def helpSyntax: String = name
+ def abbreviations: List[String] = Nil
+ def dependencies: List[(Setting, String)] = Nil
+ def respondsTo(label: String) = (name == label) || (abbreviations contains label)
+
+ /** If the setting should not appear in help output, etc. */
+ def isInternalOnly = false
+
+ /** Issue error and return */
+ def errorAndValue[T](msg: String, x: T): T = { errorFn(msg) ; x }
+
+ /** After correct Setting has been selected, tryToSet is called with the
+ * remainder of the command line. It consumes any applicable arguments and
+ * returns the unconsumed ones.
+ */
+ protected[nsc] def tryToSet(args: List[String]): Option[ResultOfTryToSet]
+
+ /** Commands which can take lists of arguments in form -Xfoo:bar,baz override
+ * this method and accept them as a list. It returns List[String] for
+ * consistency with tryToSet, and should return its incoming arguments
+ * unmodified on failure, and Nil on success.
+ */
+ protected[nsc] def tryToSetColon(args: List[String]): Option[ResultOfTryToSet] =
+ errorAndValue("'%s' does not accept multiple arguments" format name, None)
+
+ /** Commands which take properties in form -Dfoo=bar or -Dfoo
+ */
+ protected[nsc] def tryToSetProperty(args: List[String]): Option[ResultOfTryToSet] =
+ errorAndValue("'%s' does not accept property style arguments" format name, None)
+
+ /** Attempt to set from a properties file style property value.
+ */
+ def tryToSetFromPropertyValue(s: String): Unit = tryToSet(s :: Nil)
+
+ /** These categorizations are so the help output shows -X and -P among
+ * the standard options and -Y among the advanced options.
+ */
+ def isAdvanced = name match { case "-Y" => true ; case "-X" => false ; case _ => name startsWith "-X" }
+ def isPrivate = name match { case "-Y" => false ; case _ => name startsWith "-Y" }
+ def isStandard = !isAdvanced && !isPrivate
+
+ def compare(that: Setting): Int = name compare that.name
+
+ /** Equality tries to sidestep all the drama and define it simply and
+ * in one place: two AbsSetting objects are equal if their names and
+ * values compare equal.
+ */
+ override def equals(that: Any) = that match {
+ case x: AbsSettings#AbsSetting => (name == x.name) && (value == x.value)
+ case _ => false
+ }
+ override def hashCode() = (name, value).hashCode
+ override def toString() = "%s = %s".format(name, value)
+ }
+
+ trait InternalSetting extends AbsSetting {
+ override def isInternalOnly = true
+ }
+} \ No newline at end of file
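The tryToSet and tryToSetColon comments above describe an argument-consumption contract: once a Setting has been selected, it eats the arguments that apply to it and hands back the remainder, with None signalling failure. A minimal sketch of that contract, using toy stand-in classes rather than the AbsSettings hierarchy:

  // Hedged sketch: toy settings modelling the tryToSet contract documented above.
  object TryToSetSketch {
    trait ToySetting {
      def name: String
      // Mirrors tryToSet: consume applicable args, return the unconsumed ones.
      def tryToSet(args: List[String]): Option[List[String]]
    }
    class ToyBoolean(val name: String) extends ToySetting {
      var value = false
      def tryToSet(args: List[String]) = { value = true ; Some(args) }   // consumes nothing further
    }
    class ToyString(val name: String) extends ToySetting {
      var value = ""
      def tryToSet(args: List[String]) = args match {
        case Nil     => None                          // missing argument
        case x :: xs => value = x ; Some(xs)          // consumes exactly one
      }
    }
    def main(args: Array[String]): Unit = {
      val verbose = new ToyBoolean("-verbose")
      val enc     = new ToyString("-encoding")
      println(verbose.tryToSet(List("UTF-8", "Foo.scala")))   // Some(List(UTF-8, Foo.scala))
      println(enc.tryToSet(List("UTF-8", "Foo.scala")))       // Some(List(Foo.scala))
      println(enc.tryToSet(Nil))                              // None
    }
  }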
diff --git a/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala
new file mode 100644
index 0000000000..830370a3e7
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala
@@ -0,0 +1,76 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package settings
+
+trait AdvancedScalaSettings {
+ self: AbsScalaSettings =>
+
+ abstract class X extends SettingGroup("-X") {
+ val assemextdirs: StringSetting
+ val assemname: StringSetting
+ val assempath: StringSetting
+ val checkinit: BooleanSetting
+ val disableassertions: BooleanSetting
+ val elidebelow: IntSetting
+ val experimental: BooleanSetting
+ val future: BooleanSetting
+ val generatephasegraph: StringSetting
+ val logimplicits: BooleanSetting
+ val migration: BooleanSetting
+ val noforwarders: BooleanSetting
+ val nojline: BooleanSetting
+ val nouescape: BooleanSetting
+ val plugin: MultiStringSetting
+ val plugindisable: MultiStringSetting
+ val pluginlist: BooleanSetting
+ val pluginrequire: MultiStringSetting
+ val pluginsdir: StringSetting
+ val print: PhasesSetting
+ val printicode: BooleanSetting
+ val printpos: BooleanSetting
+ val printtypes: BooleanSetting
+ val prompt: BooleanSetting
+ val resident: BooleanSetting
+ val script: StringSetting
+ val showclass: StringSetting
+ val showobject: StringSetting
+ val showphases: BooleanSetting
+ val sourcedir: StringSetting
+ val sourcereader: StringSetting
+ }
+ // def Xexperimental = X.experimental
+ // def Xmigration28 = X.migration
+ // def Xnojline = X.nojline
+ // def Xprint = X.print
+ // def Xprintpos = X.printpos
+ // def Xshowcls = X.showclass
+ // def Xshowobj = X.showobject
+ // def assemextdirs = X.assemextdirs
+ // def assemname = X.assemname
+ // def assemrefs = X.assempath
+ // def checkInit = X.checkinit
+ // def disable = X.plugindisable
+ // def elideLevel = X.elidelevel
+ // def future = X.future
+ // def genPhaseGraph = X.generatephasegraph
+ // def logimplicits = X.logimplicits
+ // def noForwarders = X.noforwarders
+ // def noassertions = X.disableassertions
+ // def nouescape = X.nouescape
+ // def plugin = X.plugin
+ // def pluginsDir = X.pluginsdir
+ // def printtypes = X.printtypes
+ // def prompt = X.prompt
+ // def require = X.require
+ // def resident = X.resident
+ // def script = X.script
+ // def showPhases = X.showphases
+ // def showPlugins = X.pluginlist
+ // def sourceReader = X.sourcereader
+ // def sourcedir = X.sourcedir
+ // def writeICode = X.printicode
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/settings/ImmutableSettings.scala b/src/compiler/scala/tools/nsc/settings/ImmutableSettings.scala
new file mode 100644
index 0000000000..a673860417
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/ImmutableSettings.scala
@@ -0,0 +1,11 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package settings
+
+/** TODO.
+ */
+class ImmutableSettings
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
new file mode 100644
index 0000000000..44ab9c6062
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -0,0 +1,567 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Martin Odersky
+ */
+// $Id$
+
+package scala.tools.nsc
+package settings
+
+import io.AbstractFile
+import util.{ ClassPath, CommandLineParser }
+import annotation.elidable
+import scala.tools.util.StringOps
+import scala.collection.mutable.ListBuffer
+import interpreter.{ returning }
+
+/** A mutable Settings object.
+ */
+class MutableSettings(val errorFn: String => Unit) extends AbsSettings with ScalaSettings with Mutable {
+ type ResultOfTryToSet = List[String]
+
+ /** Iterates over the arguments applying them to settings where applicable.
+ * Then verifies setting dependencies are met.
+ *
+ * This temporarily takes a boolean indicating whether to keep
+ * processing if an argument is seen which is not a command line option.
+ * This is an expedient for the moment so that you can say
+ *
+ * scalac -d /tmp foo.scala -optimise
+ *
+ * while also allowing
+ *
+ * scala Program opt opt
+ *
+ * to get their arguments.
+ *
+ * Returns (success, List of unprocessed arguments)
+ */
+ def processArguments(arguments: List[String], processAll: Boolean): (Boolean, List[String]) = {
+ var args = arguments
+ val residualArgs = new ListBuffer[String]
+
+ while (args.nonEmpty) {
+ if (args.head startsWith "-") {
+ val args0 = args
+ args = this parseParams args
+ if (args eq args0) {
+ errorFn("bad option: '" + args.head + "'")
+ return ((false, args))
+ }
+ }
+ else if (args.head == "") { // discard empties, sometimes they appear because of Ant etc.
+ args = args.tail
+ }
+ else {
+ if (!processAll)
+ return ((checkDependencies, args))
+
+ residualArgs += args.head
+ args = args.tail
+ }
+ }
+
+ ((checkDependencies, residualArgs.toList))
+ }
+ def processArgumentString(params: String) = processArguments(splitParams(params), true)
+
+ /** Create a new Settings object, copying all user-set values.
+ */
+ def copy(): Settings = {
+ val s = new Settings()
+ val xs = userSetSettings flatMap (_.unparse)
+ s.processArguments(xs.toList, true)
+ s
+ }
+
+ /** A list pairing source directories with their output directory.
+ * This option is not available on the command line, but can be set by
+ * other tools (IDEs especially). The command line specifies a single
+ * output directory that is used for all source files, denoted by a
+ * '*' in this list.
+ */
+ lazy val outputDirs = new OutputDirs
+
+ /** Split the given line into parameters.
+ */
+ def splitParams(line: String) = CommandLineParser.tokenize(line, errorFn)
+
+ /** Returns any unprocessed arguments.
+ */
+ def parseParams(args: List[String]): List[String] = {
+ // verify command exists and call setter
+ def tryToSetIfExists(
+ cmd: String,
+ args: List[String],
+ setter: (Setting) => (List[String] => Option[List[String]])
+ ): Option[List[String]] =
+ lookupSetting(cmd) match {
+ case None => errorFn("Parameter '" + cmd + "' is not recognised by Scalac.") ; None
+ case Some(cmd) =>
+ val res = setter(cmd)(args)
+ cmd.postSetHook()
+ res
+ }
+
+ // if arg is of form -Xfoo:bar,baz,quux
+ def parseColonArg(s: String): Option[List[String]] = {
+ val (p, args) = StringOps.splitWhere(s, _ == ':', true) getOrElse (return None)
+
+ // any non-Nil return value means failure and we return s unmodified
+ tryToSetIfExists(p, args split "," toList, (s: Setting) => s.tryToSetColon _)
+ }
+ // if arg is of form -Dfoo=bar or -Dfoo (name = "-D")
+ def isPropertyArg(s: String) = lookupSetting(s take 2) match {
+ case Some(x: DefinesSetting) => true
+ case _ => false
+ }
+ def parsePropertyArg(s: String): Option[List[String]] = {
+ val (p, args) = (s take 2, s drop 2)
+
+ tryToSetIfExists(p, List(args), (s: Setting) => s.tryToSetProperty _)
+ }
+
+ // if arg is of form -Xfoo or -Xfoo bar (name = "-Xfoo")
+ def parseNormalArg(p: String, args: List[String]): Option[List[String]] =
+ tryToSetIfExists(p, args, (s: Setting) => s.tryToSet _)
+
+ def doArgs(args: List[String]): List[String] = {
+ if (args.isEmpty) return Nil
+ val arg :: rest = args
+ if (arg == "") {
+ // it looks like Ant passes "" sometimes
+ rest
+ }
+ else if (!arg.startsWith("-")) {
+ errorFn("Argument '" + arg + "' does not start with '-'.")
+ args
+ }
+ else if (arg == "-") {
+ errorFn("'-' is not a valid argument.")
+ args
+ }
+ else
+ // we dispatch differently based on the appearance of p:
+ // 1) If it has a : it is presumed to be -Xfoo:bar,baz
+ // 2) If the first two chars are the name of a command, -Dfoo=bar
+ // 3) Otherwise, the whole string should be a command name
+ //
+ // Internally we use Option[List[String]] to discover error,
+ // but the outside expects our arguments back unchanged on failure
+ if (arg contains ":") parseColonArg(arg) match {
+ case Some(_) => rest
+ case None => args
+ }
+ else if (isPropertyArg(arg)) parsePropertyArg(arg) match {
+ case Some(_) => rest
+ case None => args
+ }
+ else parseNormalArg(arg, rest) match {
+ case Some(xs) => xs
+ case None => args
+ }
+ }
+
+ doArgs(args)
+ }
+
+ // a wrapper for all Setting creators to keep our list up to date
+ private def add[T <: Setting](s: T): T = {
+ allSettings += s
+ s
+ }
+
+ def BooleanSetting(name: String, descr: String) = add(new BooleanSetting(name, descr))
+ def ChoiceSetting(name: String, descr: String, choices: List[String], default: String) =
+ add(new ChoiceSetting(name, descr, choices, default))
+ def DefinesSetting() = add(new DefinesSetting())
+ def IntSetting(name: String, descr: String, default: Int, range: Option[(Int, Int)], parser: String => Option[Int]) = add(new IntSetting(name, descr, default, range, parser))
+ def MultiStringSetting(name: String, arg: String, descr: String) = add(new MultiStringSetting(name, arg, descr))
+ def OutputSetting(outputDirs: OutputDirs, default: String) = add(new OutputSetting(outputDirs, default))
+ def PhasesSetting(name: String, descr: String) = add(new PhasesSetting(name, descr))
+ def StringSetting(name: String, arg: String, descr: String, default: String) = add(new StringSetting(name, arg, descr, default))
+ def PathSetting(name: String, arg: String, descr: String, default: String): PathSetting = {
+ val prepend = new StringSetting(name + "/p", "", "", "") with InternalSetting
+ val append = new StringSetting(name + "/a", "", "", "") with InternalSetting
+
+ add[StringSetting](prepend)
+ add[StringSetting](append)
+ add(new PathSetting(name, arg, descr, default, prepend, append))
+ }
+
+ // basically this is a value which remembers if it's been modified
+ trait SettingValue extends AbsSettingValue {
+ protected var v: T
+ protected var setByUser: Boolean = false
+
+ def isDefault: Boolean = !setByUser
+ def value: T = v
+ def value_=(arg: T) = { setByUser = true ; v = arg }
+ }
+
+ /** A class for holding mappings from source directories to
+ * their output location. This functionality can be accessed
+ * only programmatically. The command line compiler uses a
+ * single output location, but tools may use this functionality
+ * to set output location per source directory.
+ */
+ class OutputDirs {
+ /** Pairs of source directory - destination directory. */
+ private var outputDirs: List[(AbstractFile, AbstractFile)] = Nil
+
+ /** If this is not None, the output location where all
+ * classes should go.
+ */
+ private var singleOutDir: Option[AbstractFile] = None
+
+ /** Add a destination directory for sources found under srcdir.
+ * Both directories should exist.
+ */
+ def add(srcDir: String, outDir: String): Unit =
+ add(checkDir(AbstractFile.getDirectory(srcDir), srcDir),
+ checkDir(AbstractFile.getDirectory(outDir), outDir))
+
+ /** Check that dir exists and is a directory. */
+ private def checkDir(dir: AbstractFile, name: String): AbstractFile = {
+ if ((dir eq null) || !dir.isDirectory)
+ throw new FatalError(name + " does not exist or is not a directory")
+ dir
+ }
+
+ /** Set the single output directory. From now on, all files will
+ * be dumped in there, regardless of previous calls to 'add'.
+ */
+ def setSingleOutput(outDir: String) {
+ val dst = AbstractFile.getDirectory(outDir)
+ setSingleOutput(checkDir(dst, outDir))
+ }
+
+ /** Set the single output directory. From now on, all files will
+ * be dumped in there, regardless of previous calls to 'add'.
+ */
+ def setSingleOutput(dir: AbstractFile) {
+ singleOutDir = Some(dir)
+ }
+
+ def add(src: AbstractFile, dst: AbstractFile) {
+ singleOutDir = None
+ outputDirs ::= (src, dst)
+ }
+
+ /** Return the list of source-destination directory pairs. */
+ def outputs: List[(AbstractFile, AbstractFile)] = outputDirs
+
+ /** Return the output directory for the given file.
+ */
+ def outputDirFor(src: AbstractFile): AbstractFile = {
+ def isBelow(srcDir: AbstractFile, outDir: AbstractFile) =
+ src.path.startsWith(srcDir.path)
+
+ singleOutDir match {
+ case Some(d) => d
+ case None =>
+ (outputs find (isBelow _).tupled) match {
+ case Some((_, d)) => d
+ case _ =>
+ throw new FatalError("Could not find an output directory for "
+ + src.path + " in " + outputs)
+ }
+ }
+ }
+
+ /** Return the source file path(s) which correspond to the given
+ * classfile path and SourceFile attribute value, subject to the
+ * condition that source files are arranged in the filesystem
+ * according to Java package layout conventions.
+ *
+ * The given classfile path must be contained in at least one of
+ * the specified output directories. If it does not then this
+ * method returns Nil.
+ *
+ * Note that the source file is not required to exist, so assuming
+ * a valid classfile path this method will always return a list
+ * containing at least one element.
+ *
+ * Note also that if two or more source path elements target the same
+ * output directory, there will be two or more candidate source file
+ * paths.
+ */
+ def srcFilesFor(classFile : AbstractFile, srcPath : String) : List[AbstractFile] = {
+ def isBelow(srcDir: AbstractFile, outDir: AbstractFile) =
+ classFile.path.startsWith(outDir.path)
+
+ singleOutDir match {
+ case Some(d) => Nil
+ case None =>
+ (outputs filter (isBelow _).tupled) match {
+ case Nil => Nil
+ case matches => matches.map(_._1.lookupPathUnchecked(srcPath, false))
+ }
+ }
+ }
+ }
+
+ /** A base class for settings of all types.
+ * Subclasses each define a `value' field of the appropriate type.
+ */
+ abstract class Setting(val name: String, val helpDescription: String) extends AbsSetting with SettingValue with Mutable {
+ /** Will be called after this Setting is set for any extra work. */
+ private var _postSetHook: this.type => Unit = (x: this.type) => ()
+ def postSetHook() = { _postSetHook(this) ; this }
+ def withPostSetHook(f: this.type => Unit): this.type = { _postSetHook = f ; this }
+
+ /** The syntax defining this setting in a help string */
+ private var _helpSyntax = name
+ override def helpSyntax: String = _helpSyntax
+ def withHelpSyntax(s: String): this.type = { _helpSyntax = s ; this }
+
+ /** Abbreviations for this setting */
+ private var _abbreviations: List[String] = Nil
+ override def abbreviations = _abbreviations
+ def withAbbreviation(s: String): this.type = { _abbreviations ++= List(s) ; this }
+
+ /** Optional dependency on another setting */
+ private var dependency: Option[(Setting, String)] = None
+ override def dependencies = dependency.toList
+ def dependsOn(s: Setting, value: String): this.type = { dependency = Some((s, value)); this }
+ }
+
+ /** A setting represented by an integer */
+ class IntSetting private[nsc](
+ name: String,
+ descr: String,
+ val default: Int,
+ val range: Option[(Int, Int)],
+ parser: String => Option[Int])
+ extends Setting(name, descr) {
+ type T = Int
+ protected var v = default
+
+ // not stable values!
+ val IntMin = Int.MinValue
+ val IntMax = Int.MaxValue
+ def min = range map (_._1) getOrElse IntMin
+ def max = range map (_._2) getOrElse IntMax
+
+ override def value_=(s: Int) =
+ if (isInputValid(s)) super.value_=(s) else errorMsg
+
+ // Validate that min and max are consistent
+ assert(min <= max)
+
+ // Helper to validate an input
+ private def isInputValid(k: Int): Boolean = (min <= k) && (k <= max)
+
+ // Helper to generate a textual explanation of valid inputs
+ private def getValidText: String = (min, max) match {
+ case (IntMin, IntMax) => "can be any integer"
+ case (IntMin, x) => "must be less than or equal to "+x
+ case (x, IntMax) => "must be greater than or equal to "+x
+ case _ => "must be between %d and %d".format(min, max)
+ }
+
+ // Ensure that the default value is actually valid
+ assert(isInputValid(default))
+
+ def parseArgument(x: String): Option[Int] = {
+ parser(x) orElse {
+ try { Some(x.toInt) }
+ catch { case _: NumberFormatException => None }
+ }
+ }
+
+ def errorMsg = errorFn("invalid setting for -"+name+" "+getValidText)
+
+ def tryToSet(args: List[String]) =
+ if (args.isEmpty) errorAndValue("missing argument", None)
+ else parseArgument(args.head) match {
+ case Some(i) => value = i ; Some(args.tail)
+ case None => errorMsg ; None
+ }
+
+ def unparse: List[String] =
+ if (value == default) Nil
+ else List(name, value.toString)
+ }
+
+ /** A setting represented by a boolean flag (false, unless set) */
+ class BooleanSetting private[nsc](
+ name: String,
+ descr: String)
+ extends Setting(name, descr) {
+ type T = Boolean
+ protected var v = false
+
+ def tryToSet(args: List[String]) = { value = true ; Some(args) }
+ def unparse: List[String] = if (value) List(name) else Nil
+ override def tryToSetFromPropertyValue(s : String) {
+ value = s.equalsIgnoreCase("true")
+ }
+ }
+
+ /** A setting represented by a string, (`default' unless set) */
+ class StringSetting private[nsc](
+ name: String,
+ val arg: String,
+ descr: String,
+ val default: String)
+ extends Setting(name, descr) {
+ type T = String
+ protected var v = default
+
+ def tryToSet(args: List[String]) = args match {
+ case Nil => errorAndValue("missing argument", None)
+ case x :: xs => value = x ; Some(xs)
+ }
+ def unparse: List[String] = if (value == default) Nil else List(name, value)
+
+ withHelpSyntax(name + " <" + arg + ">")
+ }
+
+ class PathSetting private[nsc](
+ name: String,
+ arg: String,
+ descr: String,
+ default: String,
+ prependPath: StringSetting,
+ appendPath: StringSetting)
+ extends StringSetting(name, arg, descr, default) {
+ import ClassPath.join
+ def prepend(s: String) = prependPath.value = join(s, prependPath.value)
+ def append(s: String) = appendPath.value = join(appendPath.value, s)
+
+ override def value = join(
+ prependPath.value,
+ super.value,
+ appendPath.value
+ )
+ }
+
+ /** Set the output directory. */
+ class OutputSetting private[nsc](
+ outputDirs: OutputDirs,
+ default: String)
+ extends StringSetting("-d", "directory", "Specify where to place generated class files", default) {
+ value = default
+ override def value_=(str: String) {
+ super.value_=(str)
+ outputDirs.setSingleOutput(str)
+ }
+ }
+
+ /** A setting that accumulates all strings supplied to it,
+ * until it encounters one starting with a '-'. */
+ class MultiStringSetting private[nsc](
+ name: String,
+ val arg: String,
+ descr: String)
+ extends Setting(name, descr) {
+ type T = List[String]
+ protected var v: List[String] = Nil
+ def appendToValue(str: String) { value ++= List(str) }
+
+ def tryToSet(args: List[String]) = {
+ val (strings, rest) = args span (x => !x.startsWith("-"))
+ strings foreach appendToValue
+
+ Some(rest)
+ }
+ override def tryToSetColon(args: List[String]) = tryToSet(args)
+ override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(" +").toList)
+ def unparse: List[String] = value map { name + ":" + _ }
+
+ withHelpSyntax(name + ":<" + arg + ">")
+ }
+
+ /** A setting represented by a string in a given set of <code>choices</code>,
+ * (<code>default</code> unless set).
+ */
+ class ChoiceSetting private[nsc](
+ name: String,
+ descr: String,
+ override val choices: List[String],
+ val default: String)
+ extends Setting(name, descr + choices.mkString(" (", ",", ")")) {
+ type T = String
+ protected var v: String = default
+ protected def argument: String = name drop 1
+ def indexOfChoice: Int = choices indexOf value
+
+ def tryToSet(args: List[String]) = { value = default ; Some(args) }
+
+ override def tryToSetColon(args: List[String]) = args match {
+ case Nil => errorAndValue("missing " + argument, None)
+ case List(x) if choices contains x => value = x ; Some(Nil)
+ case List(x) => errorAndValue("'" + x + "' is not a valid choice for '" + name + "'", None)
+ case xs => errorAndValue("'" + name + "' does not accept multiple arguments.", None)
+ }
+ def unparse: List[String] =
+ if (value == default) Nil else List(name + ":" + value)
+
+ withHelpSyntax(name + ":<" + argument + ">")
+ }
+
+ /** A setting represented by a list of strings which should be prefixes of
+ * phase names. This is not checked here, however. Alternatively the string
+ * "all" can be used to represent all phases.
+ * (the empty list, unless set)
+ */
+ class PhasesSetting private[nsc](
+ name: String,
+ descr: String)
+ extends Setting(name, descr + " <phase> or \"all\"") {
+ type T = List[String]
+ protected var v: List[String] = Nil
+ override def value = if (v contains "all") List("all") else super.value
+
+ def tryToSet(args: List[String]) = errorAndValue("missing phase", None)
+ override def tryToSetColon(args: List[String]) = args match {
+ case Nil => errorAndValue("missing phase", None)
+ case xs => value = (value ++ xs).distinct.sorted ; Some(Nil)
+ }
+ // we slightly abuse the usual meaning of "contains" here by returning
+ // true if our phase list contains "all", regardless of the incoming argument
+ def contains(phasename: String): Boolean =
+ doAllPhases || (value exists { phasename startsWith _ } )
+
+ def doAllPhases() = value contains "all"
+ def unparse: List[String] = value map { name + ":" + _ }
+
+ withHelpSyntax(name + ":<phase>")
+ }
+
+ /** A setting for a -D style property definition */
+ class DefinesSetting private[nsc] extends Setting("-D", "set a Java property") {
+ type T = List[(String, String)]
+ protected var v: T = Nil
+ withHelpSyntax(name + "<prop>")
+
+ // given foo=bar returns Some(foo, bar), or None if parse fails
+ def parseArg(s: String): Option[(String, String)] = {
+ if (s == "") return None
+ val idx = s indexOf '='
+
+ if (idx < 0) Some(s, "")
+ else Some(s take idx, s drop (idx + 1))
+ }
+
+ protected[nsc] override def tryToSetProperty(args: List[String]): Option[List[String]] =
+ tryToSet(args)
+
+ def tryToSet(args: List[String]) =
+ if (args.isEmpty) None
+ else parseArg(args.head) match {
+ case None => None
+ case Some((a, b)) => value = value ++ List((a, b)) ; Some(args.tail)
+ }
+
+ def unparse: List[String] =
+ value map { case (k,v) => "-D" + k + (if (v == "") "" else "=" + v) }
+
+ /** Apply the specified properties to the current JVM and return them. */
+ def applyToJVM() = {
+ value foreach { case (k, v) => System.getProperties.setProperty(k, v) }
+ value
+ }
+ }
+}
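doArgs above dispatches on the shape of each argument: a ':' selects the -Xfoo:bar,baz path, a leading property command (only -D here, via DefinesSetting) selects the -Dfoo=bar path, and anything else is a plain command whose value, if any, follows as a separate argument. A minimal sketch of that classification, using a plain function instead of the Setting machinery:

  // Hedged sketch: the three argument shapes distinguished by parseParams/doArgs.
  object ArgShapeSketch {
    sealed trait Shape
    case class ColonArg(cmd: String, values: List[String]) extends Shape  // -Xprint:typer,erasure
    case class PropertyArg(key: String, value: String) extends Shape      // -Dfoo=bar
    case class NormalArg(cmd: String) extends Shape                       // -encoding (value follows separately)

    // Assumes, as with DefinesSetting above, that "-D" is the only property-style command.
    def classify(arg: String): Shape =
      if (arg contains ":") {
        val idx = arg indexOf ':'
        ColonArg(arg take idx, arg.drop(idx + 1).split(",").toList)
      }
      else if (arg startsWith "-D") {
        val rest = arg drop 2
        val eq   = rest indexOf '='
        if (eq < 0) PropertyArg(rest, "") else PropertyArg(rest take eq, rest drop (eq + 1))
      }
      else NormalArg(arg)

    def main(args: Array[String]): Unit =
      List("-Xprint:typer,erasure", "-Dscala.home=/opt/scala", "-encoding") map classify foreach println
  }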
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
new file mode 100644
index 0000000000..f9fd996add
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -0,0 +1,164 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Martin Odersky
+ */
+// $Id$
+
+package scala.tools.nsc
+package settings
+
+import io.AbstractFile
+import util.{ ClassPath, SourceFile, CommandLineParser }
+import annotation.elidable
+import scala.tools.util.{ PathResolver, StringOps }
+import scala.collection.mutable.{ HashSet, ListBuffer }
+import scala.collection.immutable.TreeSet
+import interpreter.{ returning }
+
+trait ScalaSettings extends AbsScalaSettings with StandardScalaSettings {
+ self: MutableSettings =>
+
+ import PathResolver.{ Defaults, Environment }
+ import Defaults.{ scalaUserClassPath }
+
+ /** Set of settings */
+ protected lazy val allSettings = HashSet[Setting]()
+
+ /** Disable a setting */
+ def disable(s: Setting) = allSettings -= s
+
+ /**
+ * Temporary Settings
+ */
+ val suppressVTWarn = BooleanSetting ("-Ysuppress-vt-typer-warnings", "Suppress warnings from the typer when testing the virtual class encoding, NOT FOR FINAL!")
+ val javaignorecp = BooleanSetting ("-javaignorecp", "Does nothing - is being removed.") // !!! marked for death, but need new starr.
+
+ /**
+ * Standard settings
+ */
+ // argfiles is only for the help message
+ val argfiles = BooleanSetting ("@<file>", "A text file containing compiler arguments (options and source files)")
+ val classpath = PathSetting ("-classpath", "path", "Specify where to find user class files", scalaUserClassPath) .
+ withAbbreviation ("-cp")
+ val d = OutputSetting (outputDirs, ".")
+ val defines = DefinesSetting()
+ val optimise = BooleanSetting ("-optimise", "Generates faster bytecode by applying optimisations to the program") .
+ withAbbreviation("-optimize") .
+ withPostSetHook(_ => List(inline, Xcloselim, Xdce) foreach (_.value = true))
+
+ /**
+ * -X "Advanced" settings
+ */
+ val Xhelp = BooleanSetting ("-X", "Print a synopsis of advanced options")
+ val assemname = StringSetting ("-Xassem-name", "file", "Name of the output assembly (only relevant with -target:msil)", "").dependsOn(target, "msil")
+ val assemrefs = StringSetting ("-Xassem-path", "path", "List of assemblies referenced by the program (only relevant with -target:msil)", ".").dependsOn(target, "msil")
+ val assemextdirs = StringSetting ("-Xassem-extdirs", "dirs", "List of directories containing assemblies, defaults to `lib'", Defaults.scalaLibDir.path).dependsOn(target, "msil")
+ val sourcedir = StringSetting ("-Xsourcedir", "directory", "When -target:msil, the source folder structure is mirrored in output directory.", ".").dependsOn(target, "msil")
+ val checkInit = BooleanSetting ("-Xcheckinit", "Add runtime checks on field accessors. Uninitialized accesses result in an exception being thrown.")
+ val noassertions = BooleanSetting ("-Xdisable-assertions", "Generate no assertions and assumptions")
+ val elidebelow = IntSetting ("-Xelide-below", "Generate calls to @elidable-marked methods only if method priority is greater than argument.",
+ elidable.ASSERTION, None, elidable.byName.get(_))
+ val Xexperimental = BooleanSetting ("-Xexperimental", "Enable experimental extensions")
+ val noForwarders = BooleanSetting ("-Xno-forwarders", "Do not generate static forwarders in mirror classes")
+ val future = BooleanSetting ("-Xfuture", "Turn on future language features")
+ val genPhaseGraph = StringSetting ("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot", "")
+ val XlogImplicits = BooleanSetting ("-Xlog-implicits", "Show more info on why some implicits are not applicable")
+ val Xmigration28 = BooleanSetting ("-Xmigration", "Warn about constructs whose behavior may have changed between 2.7 and 2.8")
+ val nouescape = BooleanSetting ("-Xno-uescape", "Disables handling of \\u unicode escapes")
+ val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing")
+ val plugin = MultiStringSetting("-Xplugin", "file", "Load a plugin from a file")
+ val disable = MultiStringSetting("-Xplugin-disable", "plugin", "Disable a plugin")
+ val showPlugins = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins")
+ val require = MultiStringSetting("-Xplugin-require", "plugin", "Abort unless a plugin is available")
+ val pluginsDir = StringSetting ("-Xpluginsdir", "path", "Path to search compiler plugins", Defaults.scalaPluginPath)
+ val Xprint = PhasesSetting ("-Xprint", "Print out program after")
+ val writeICode = BooleanSetting ("-Xprint-icode", "Log internal icode to *.icode files")
+ val Xprintpos = BooleanSetting ("-Xprint-pos", "Print tree positions (as offsets)")
+ val printtypes = BooleanSetting ("-Xprint-types", "Print tree types (debugging option)")
+ val prompt = BooleanSetting ("-Xprompt", "Display a prompt after each error (debugging option)")
+ val resident = BooleanSetting ("-Xresident", "Compiler stays resident, files to compile are read from standard input")
+ val script = StringSetting ("-Xscript", "object", "Compile as a script, wrapping the code into object.main()", "")
+ val Xshowcls = StringSetting ("-Xshow-class", "class", "Show class info", "")
+ val Xshowobj = StringSetting ("-Xshow-object", "object", "Show object info", "")
+ val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases")
+ val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files", "scala.tools.nsc.io.SourceReader")
+
+ /** Compatibility stubs for options whose value name did
+ * not previously match the option name.
+ */
+ def XO = optimise
+ def debuginfo = g
+ def dependenciesFile = dependencyfile
+ def nowarnings = nowarn
+ def outdir = d
+ def printLate = print
+
+ /**
+ * -Y "Private" settings
+ */
+ val Yhelp = BooleanSetting ("-Y", "Print a synopsis of private options")
+ val browse = PhasesSetting ("-Ybrowse", "Browse the abstract syntax tree after")
+ val check = PhasesSetting ("-Ycheck", "Check the tree at the end of")
+ val Xcloselim = BooleanSetting ("-Yclosure-elim", "Perform closure elimination")
+ val Ycompacttrees = BooleanSetting ("-Ycompact-trees", "Use compact tree printer when displaying trees")
+ val noCompletion = BooleanSetting ("-Yno-completion", "Disable tab-completion in the REPL")
+ val Xdce = BooleanSetting ("-Ydead-code", "Perform dead code elimination")
+ val debug = BooleanSetting ("-Ydebug", "Output debugging messages")
+ val Xdetach = BooleanSetting ("-Ydetach", "Perform detaching of remote closures")
+ // val doc = BooleanSetting ("-Ydoc", "Generate documentation")
+ val inline = BooleanSetting ("-Yinline", "Perform inlining when possible")
+ val Xlinearizer = ChoiceSetting ("-Ylinearizer", "Linearizer to use", List("normal", "dfs", "rpo", "dump"), "rpo") .
+ withHelpSyntax("-Ylinearizer:<which>")
+ val log = PhasesSetting ("-Ylog", "Log operations in")
+ val Ylogcp = BooleanSetting ("-Ylog-classpath", "Output information about what classpath is being applied.")
+ val Ynogenericsig = BooleanSetting ("-Yno-generic-signatures", "Suppress generation of generic signatures for Java")
+ val noimports = BooleanSetting ("-Yno-imports", "Compile without any implicit imports")
+ val nopredefs = BooleanSetting ("-Yno-predefs", "Compile without any implicit predefined values")
+ val Yrecursion = IntSetting ("-Yrecursion", "Recursion depth used when locking symbols", 0, Some(0, Int.MaxValue), (_: String) => None)
+ val selfInAnnots = BooleanSetting ("-Yself-in-annots", "Include a \"self\" identifier inside of annotations")
+ val Xshowtrees = BooleanSetting ("-Yshow-trees", "Show detailed trees when used in connection with -print:phase")
+ val skip = PhasesSetting ("-Yskip", "Skip")
+ val Xsqueeze = ChoiceSetting ("-Ysqueeze", "if on, creates compact code in matching", List("on","off"), "on") .
+ withHelpSyntax("-Ysqueeze:<enabled>")
+ val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics")
+ val stop = PhasesSetting ("-Ystop", "Stop after phase")
+ val refinementMethodDispatch =
+ ChoiceSetting ("-Ystruct-dispatch", "Selects dispatch method for structural refinement method calls",
+ List("no-cache", "mono-cache", "poly-cache", "invoke-dynamic"), "poly-cache") .
+ withHelpSyntax("-Ystruct-dispatch:<method>")
+ val specialize = BooleanSetting ("-Yspecialize", "Specialize generic code on types.")
+ val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
+ val Yidedebug = BooleanSetting ("-Yide-debug", "Generate, validate and output trees using the interactive compiler.")
+ val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "Compile using the specified build manager", List("none", "refined", "simple"), "none") .
+ withHelpSyntax("-Ybuilder-debug:<method>")
+ val Ybuildmanagerdebug =
+ BooleanSetting ("-Ybuild-manager-debug", "Generate debug information for the Refined Build Manager compiler.")
+ val Ytyperdebug = BooleanSetting ("-Ytyper-debug", "Trace all type assignments")
+ val Ypmatdebug = BooleanSetting ("-Ypmat-debug", "Trace all pattern matcher activity.")
+ val Yrepldebug = BooleanSetting ("-Yrepl-debug", "Trace all repl activity.")
+ val Ypmatnaive = BooleanSetting ("-Ypmat-naive", "Desugar matches as naively as possible.")
+ val Ytailrec = BooleanSetting ("-Ytailrecommend", "Alert methods which would be tail-recursive if private or final.")
+ val Yjenkins = BooleanSetting ("-Yjenkins-hashCodes", "Use the Jenkins hash algorithm for case class generated hashCodes.")
+
+ // Warnings
+ val Ywarnfatal = BooleanSetting ("-Yfatal-warnings", "Fail the compilation if there are any warnings.")
+ val Xwarninit = BooleanSetting ("-Xwarninit", "Warn about possible changes in initialization semantics")
+ val Xchecknull = BooleanSetting ("-Xcheck-null", "Emit warning on selection of nullable reference")
+ val Xwarndeadcode = BooleanSetting ("-Ywarn-dead-code", "Emit warnings for dead code")
+ val YwarnShadow = BooleanSetting ("-Ywarn-shadowing", "Emit warnings about possible variable shadowing.")
+ val YwarnCatches = BooleanSetting ("-Ywarn-catches", "Emit warnings about catch blocks which catch everything.")
+ val Xwarnings = BooleanSetting ("-Xstrict-warnings", "Emit warnings about lots of things.") .
+ withPostSetHook(_ =>
+ List(YwarnShadow, YwarnCatches, Xwarndeadcode, Xwarninit) foreach (_.value = true)
+ )
+ /**
+ * "fsc-specific" settings.
+ */
+ val fscShutdown = BooleanSetting ("-shutdown", "Shutdown the fsc daemon")
+
+ /**
+ * -P "Plugin" settings
+ */
+ val pluginOptions = MultiStringSetting("-P", "plugin:opt", "Pass an option to a plugin") .
+ withHelpSyntax("-P:<plugin>:<opt>")
+}
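The -optimise entry above uses withPostSetHook to switch on -Yinline, -Yclosure-elim and -Ydead-code as a side effect of being set, and -Xstrict-warnings cascades into the warning flags the same way. A usage sketch, assuming scala-compiler is on the classpath and that scala.tools.nsc.Settings is the mutable implementation wired up in this patch:

  // Hedged usage sketch: observing the -optimise post-set hook cascade.
  object OptimiseCascadeSketch {
    def main(args: Array[String]): Unit = {
      val settings = new scala.tools.nsc.Settings(msg => Console.err.println(msg))
      println(settings.inline.value)                     // false by default
      settings.processArguments(List("-optimise"), true)
      // The hook on -optimise also flips -Yinline, -Yclosure-elim and -Ydead-code.
      println((settings.inline.value, settings.Xcloselim.value, settings.Xdce.value))
    }
  }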
diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
new file mode 100644
index 0000000000..12ae5c9d0e
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
@@ -0,0 +1,54 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package settings
+
+import scala.tools.util.PathResolver.Defaults
+
+/** Settings which aren't behind a -X, -Y, or -P option.
+ * When possible, the val and the option have identical names.
+ * The abstract settings carry comments explaining why they are as yet
+ * implemented in MutableSettings rather than mutation-generically.
+ */
+trait StandardScalaSettings {
+ self: AbsScalaSettings =>
+
+ /** Path related settings.
+ */
+ val bootclasspath = PathSetting ("-bootclasspath", "path", "Override location of bootstrap class files", Defaults.scalaBootClassPath)
+ val classpath: PathSetting // is mutated directly in various places (thus inspiring this very effort)
+ val d: OutputSetting // depends on mutable OutputDirs class
+ val extdirs = PathSetting ("-extdirs", "dirs", "Override location of installed extensions", Defaults.scalaExtDirs)
+ val javabootclasspath = PathSetting ("-javabootclasspath", "path", "Override java boot classpath.", Defaults.javaBootClassPath)
+ val javaextdirs = PathSetting ("-javaextdirs", "path", "Override java extdirs classpath.", Defaults.javaExtDirs)
+ val sourcepath = StringSetting ("-sourcepath", "path", "Specify where to find input source files", "")
+
+ /** Other settings.
+ */
+ val dependencyfile = StringSetting ("-dependencyfile", "file", "Specify the file in which dependencies are tracked", ".scala_dependencies")
+ val deprecation = BooleanSetting ("-deprecation", "Output source locations where deprecated APIs are used")
+ val encoding = StringSetting ("-encoding", "encoding", "Specify character encoding used by source files", Properties.sourceEncoding)
+ val explaintypes = BooleanSetting ("-explaintypes", "Explain type errors in more detail")
+ val g = ChoiceSetting ("-g", "Specify level of generated debugging info", List("none", "source", "line", "vars", "notailcalls"), "vars")
+ val help = BooleanSetting ("-help", "Print a synopsis of standard options")
+ val make = ChoiceSetting ("-make", "Specify recompilation detection strategy", List("all", "changed", "immediate", "transitive", "transitivenocp"), "all") .
+ withHelpSyntax("-make:<strategy>")
+ val nowarn = BooleanSetting ("-nowarn", "Generate no warnings")
+ val optimise: BooleanSetting // depends on post hook which mutates other settings
+ val print = BooleanSetting ("-print", "Print program with all Scala-specific features removed")
+ val target = ChoiceSetting ("-target", "Specify for which target object files should be built", List("jvm-1.5", "msil"), "jvm-1.5")
+ val unchecked = BooleanSetting ("-unchecked", "Enable detailed unchecked warnings")
+ val uniqid = BooleanSetting ("-uniqid", "Print identifiers with unique names for debugging")
+ val usejavacp = BooleanSetting ("-usejavacp", "Utilize the java.class.path in classpath resolution.")
+ val verbose = BooleanSetting ("-verbose", "Output messages about what the compiler is doing")
+ val version = BooleanSetting ("-version", "Print product version and exit")
+
+ /** These are @<file> and -Dkey=val style settings, which don't
+ * nicely map to identifiers.
+ */
+ val argfiles: BooleanSetting // exists only to echo help message, should be done differently
+ val defines: DefinesSetting // not entirely clear that DefinesSetting makes sense as a Setting
+}
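classpath, bootclasspath and the other path settings required here are PathSettings, whose value (per MutableSettings above) is the join of a hidden prepend slot, the user-set value, and a hidden append slot. A minimal sketch of that composition over plain strings; the join used here simply drops empty segments, which is only an approximation of ClassPath.join:

  // Hedged sketch: how a PathSetting's value is assembled from prepend + base + append.
  object PathCompositionSketch {
    def join(parts: String*): String =
      parts.filter(_.nonEmpty).mkString(java.io.File.pathSeparator)

    class ToyPathSetting(default: String) {
      private var prependPath = ""
      private var appendPath  = ""
      private var base        = default
      def prepend(s: String): Unit = { prependPath = join(s, prependPath) }
      def append(s: String): Unit  = { appendPath  = join(appendPath, s) }
      def value: String            = join(prependPath, base, appendPath)
    }

    def main(args: Array[String]): Unit = {
      val cp = new ToyPathSetting("lib/scala-library.jar")
      cp.prepend("build/classes")
      cp.append("lib/extra.jar")
      println(cp.value)   // build/classes:lib/scala-library.jar:lib/extra.jar on Unix
    }
  }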
diff --git a/src/compiler/scala/tools/nsc/symtab/AnnotationCheckers.scala b/src/compiler/scala/tools/nsc/symtab/AnnotationCheckers.scala
index a1b0c4ed06..945d9aa2fe 100644
--- a/src/compiler/scala/tools/nsc/symtab/AnnotationCheckers.scala
+++ b/src/compiler/scala/tools/nsc/symtab/AnnotationCheckers.scala
@@ -21,6 +21,18 @@ trait AnnotationCheckers {
/** Check the annotations on two types conform. */
def annotationsConform(tpe1: Type, tpe2: Type): Boolean
+ /** Refine the computed least upper bound of a list of types.
+ * All this should do is add annotations. */
+ def annotationsLub(tp: Type, ts: List[Type]): Type = tp
+
+ /** Refine the computed greatest lower bound of a list of types.
+ * All this should do is add annotations. */
+ def annotationsGlb(tp: Type, ts: List[Type]): Type = tp
+
+ /** Refine the bounds on type parameters to the given type arguments. */
+ def adaptBoundsToAnnotations(bounds: List[TypeBounds],
+ tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = bounds
+
/** Modify the type that has thus far been inferred
* for a tree. All this should do is add annotations. */
def addAnnotations(tree: Tree, tpe: Type): Type = tpe
@@ -64,6 +76,27 @@ trait AnnotationCheckers {
_.annotationsConform(tp1,tp2))
}
+ /** Refine the computed least upper bound of a list of types.
+ * All this should do is add annotations. */
+ def annotationsLub(tpe: Type, ts: List[Type]): Type = {
+ annotationCheckers.foldLeft(tpe)((tpe, checker) =>
+ checker.annotationsLub(tpe, ts))
+ }
+
+ /** Refine the computed greatest lower bound of a list of types.
+ * All this should do is add annotations. */
+ def annotationsGlb(tpe: Type, ts: List[Type]): Type = {
+ annotationCheckers.foldLeft(tpe)((tpe, checker) =>
+ checker.annotationsGlb(tpe, ts))
+ }
+
+ /** Refine the bounds on type parameters to the given type arguments. */
+ def adaptBoundsToAnnotations(bounds: List[TypeBounds],
+ tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = {
+ annotationCheckers.foldLeft(bounds)((bounds, checker) =>
+ checker.adaptBoundsToAnnotations(bounds, tparams, targs))
+ }
+
/** Let all annotations checkers add extra annotations
* to this tree's type. */
def addAnnotations(tree: Tree, tpe: Type): Type = {
@@ -74,8 +107,7 @@ trait AnnotationCheckers {
/** Find out whether any annotation checker can adapt a tree
* to a given type. Called by Typers.adapt. */
def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = {
- annotationCheckers.foldLeft(false)((res, checker) =>
- res || checker.canAdaptAnnotations(tree, mode, pt))
+ annotationCheckers.exists(_.canAdaptAnnotations(tree, mode, pt))
}
/** Let registered annotation checkers adapt a tree
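The new annotationsLub, annotationsGlb and adaptBoundsToAnnotations hooks are composed by folding the value through every registered checker, while canAdaptAnnotations becomes a short-circuiting exists. A minimal sketch of both patterns, with plain functions standing in for AnnotationChecker:

  // Hedged sketch: fold-through-all-checkers vs. exists-over-checkers.
  object CheckerFoldSketch {
    type Refine = Int => Int                       // stand-in for "refine a Type"
    val checkers: List[Refine] = List(_ + 1, _ * 10)

    // Every checker gets a chance to refine the running value, like annotationsLub/Glb.
    def refineAll(start: Int): Int =
      checkers.foldLeft(start)((acc, checker) => checker(acc))

    // Equivalent to folding with ||, but stops at the first checker that says yes.
    def anyCanAdapt(preds: List[Int => Boolean], x: Int): Boolean =
      preds exists (p => p(x))

    def main(args: Array[String]): Unit = {
      println(refineAll(4))                                  // (4 + 1) * 10 = 50
      println(anyCanAdapt(List(_ > 100, _ % 2 == 0), 4))     // true
    }
  }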
diff --git a/src/compiler/scala/tools/nsc/symtab/AnnotationInfos.scala b/src/compiler/scala/tools/nsc/symtab/AnnotationInfos.scala
index 93839057d8..efb68c4873 100644
--- a/src/compiler/scala/tools/nsc/symtab/AnnotationInfos.scala
+++ b/src/compiler/scala/tools/nsc/symtab/AnnotationInfos.scala
@@ -11,8 +11,7 @@ import scala.tools.nsc.transform.Reifiers
import util._
/** AnnotationInfo and its helpers */
-trait AnnotationInfos {
- self: SymbolTable =>
+trait AnnotationInfos extends reflect.generic.AnnotationInfos { self: SymbolTable =>
/** Arguments to classfile annotations (which are written to
* bytecode as java annotations) are either:
@@ -33,12 +32,25 @@ trait AnnotationInfos {
override def toString = const.escapedStringValue
}
+ object LiteralAnnotArg extends LiteralAnnotArgExtractor
+
/** Represents an array of classfile annotation arguments */
case class ArrayAnnotArg(args: Array[ClassfileAnnotArg])
extends ClassfileAnnotArg {
override def toString = args.mkString("[", ", ", "]")
}
+ object ArrayAnnotArg extends ArrayAnnotArgExtractor
+
+ /** A specific annotation argument that encodes an array of bytes as an array of `Long`. The type of the argument
+ * declared in the annotation must be `String`. This specialised class is used to encode scala signatures for
+ * reasons of efficiency, both in terms of class-file size and in terms of compiler performance. */
+ case class ScalaSigBytes(bytes: Array[Byte])
+ extends ClassfileAnnotArg {
+ override def toString = (bytes map { byte => (byte & 0xff).toHexString }).mkString("[ ", " ", " ]")
+ }
+ object ScalaSigBytes extends ScalaSigBytesExtractor
+
/** Represents a nested classfile annotation */
case class NestedAnnotArg(annInfo: AnnotationInfo)
extends ClassfileAnnotArg {
@@ -47,6 +59,8 @@ trait AnnotationInfos {
override def toString = annInfo.toString
}
+ object NestedAnnotArg extends NestedAnnotArgExtractor
+
class AnnotationInfoBase
/** <p>
@@ -60,7 +74,7 @@ trait AnnotationInfos {
* class).
* </p>
* <p>
- * Annotations are pickled (written to scala symbtab attribute
+ * Annotations are pickled (written to scala symtab attribute
* in the classfile) if <code>atp</code> inherits form
* <code>StaticAnnotation</code>.
* </p>
@@ -106,6 +120,11 @@ trait AnnotationInfos {
}
}
+ object AnnotationInfo extends AnnotationInfoExtractor
+
+ lazy val classfileAnnotArgManifest: ClassManifest[ClassfileAnnotArg] =
+ reflect.ClassManifest.classType(classOf[ClassfileAnnotArg])
+
/** Symbol annotations parsed in Namer (typeCompleter of
* definitions) have to be lazy (#1782)
*/
diff --git a/src/compiler/scala/tools/nsc/symtab/Definitions.scala b/src/compiler/scala/tools/nsc/symtab/Definitions.scala
index 155f5332d8..73d5193ed0 100644
--- a/src/compiler/scala/tools/nsc/symtab/Definitions.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Definitions.scala
@@ -11,10 +11,10 @@ import scala.collection.mutable.{HashMap, HashSet}
import scala.tools.nsc.util.{Position, NoPosition}
import Flags._
-trait Definitions {
+trait Definitions extends reflect.generic.StandardDefinitions {
self: SymbolTable =>
- object definitions {
+ object definitions extends AbsDefinitions {
def isDefinitionsInitialized = isInitialized
// Working around bug #2133
@@ -30,7 +30,7 @@ trait Definitions {
val rp=NoSymbol.newValue(NoPosition, nme.ROOTPKG)
.setFlag(FINAL | MODULE | PACKAGE | JAVA)
.setInfo(PolyType(List(), RootClass.tpe))
- RootClass.setSourceModule(rp)
+ RootClass.sourceModule = rp
rp
}
lazy val RootClass: ModuleClassSymbol = NoSymbol.newModuleClass(NoPosition, nme.ROOT.toTypeName)
@@ -84,14 +84,14 @@ trait Definitions {
// the scala value classes
lazy val UnitClass = newClass(ScalaPackageClass, nme.Unit, anyvalparam).setFlag(ABSTRACT | FINAL)
- lazy val ByteClass = newValueClass(nme.Byte, 'B', 1)
- lazy val ShortClass = newValueClass(nme.Short, 'S', 2)
- lazy val CharClass = newValueClass(nme.Char, 'C', 2)
- lazy val IntClass = newValueClass(nme.Int, 'I', 3)
- lazy val LongClass = newValueClass(nme.Long, 'L', 4)
- lazy val FloatClass = newValueClass(nme.Float, 'F', 5)
- lazy val DoubleClass = newValueClass(nme.Double, 'D', 6)
- lazy val BooleanClass = newValueClass(nme.Boolean, 'Z', -1)
+ lazy val ByteClass = newValueClass(nme.Byte, 'B', 2)
+ lazy val ShortClass = newValueClass(nme.Short, 'S', 4)
+ lazy val CharClass = newValueClass(nme.Char, 'C', 3)
+ lazy val IntClass = newValueClass(nme.Int, 'I', 12)
+ lazy val LongClass = newValueClass(nme.Long, 'L', 24)
+ lazy val FloatClass = newValueClass(nme.Float, 'F', 48)
+ lazy val DoubleClass = newValueClass(nme.Double, 'D', 96)
+ lazy val BooleanClass = newValueClass(nme.Boolean, 'Z', 0)
def Boolean_and = getMember(BooleanClass, nme.ZAND)
def Boolean_or = getMember(BooleanClass, nme.ZOR)
@@ -103,7 +103,7 @@ trait Definitions {
// exceptions and other throwables
lazy val ThrowableClass = getClass(sn.Throwable)
lazy val NullPointerExceptionClass = getClass(sn.NPException)
- lazy val NonLocalReturnExceptionClass = getClass(sn.NLRException)
+ lazy val NonLocalReturnControlClass = getClass(sn.NLRControl)
lazy val IndexOutOfBoundsExceptionClass = getClass(sn.IOOBException)
lazy val UninitializedErrorClass = getClass("scala.UninitializedFieldError")
lazy val MatchErrorClass = getClass("scala.MatchError")
@@ -140,10 +140,6 @@ trait Definitions {
// fundamental modules
lazy val PredefModule: Symbol = getModule("scala.Predef")
def Predef_classOf = getMember(PredefModule, nme.classOf)
- def Predef_classOfType(classType: Type): Type =
- if (!ClassClass.unsafeTypeParams.isEmpty && !phase.erasedTypes)
- appliedType(ClassClass.tpe, List(classType))
- else ClassClass.tpe
def Predef_error = getMember(PredefModule, nme.error)
def Predef_identity = getMember(PredefModule, nme.identity)
def Predef_conforms = getMember(PredefModule, nme.conforms)
@@ -155,6 +151,8 @@ trait Definitions {
def arrayUpdateMethod = getMember(ScalaRunTimeModule, "array_update")
def arrayLengthMethod = getMember(ScalaRunTimeModule, "array_length")
def arrayCloneMethod = getMember(ScalaRunTimeModule, "array_clone")
+ def scalaRuntimeHash = getMember(ScalaRunTimeModule, "hash")
+ def scalaRuntimeSameElements = getMember(ScalaRunTimeModule, nme.sameElements)
// classes with special meanings
lazy val NotNullClass = getClass("scala.NotNull")
@@ -240,6 +238,8 @@ trait Definitions {
lazy val CodeModule = getModule(sn.Code)
def Code_lift = getMember(CodeModule, nme.lift_)
+ lazy val ScalaSignatureAnnotation = getClass("scala.reflect.ScalaSignature")
+
// invoke dynamic support
lazy val LinkageModule = getModule("java.dyn.Linkage")
lazy val Linkage_invalidateCallerClass = getMember(LinkageModule, "invalidateCallerClass")
@@ -249,11 +249,11 @@ trait Definitions {
// Option classes
lazy val OptionClass: Symbol = getClass("scala.Option")
lazy val SomeClass: Symbol = getClass("scala.Some")
- lazy val NoneClass: Symbol = getModule("scala.None")
+ lazy val NoneModule: Symbol = getModule("scala.None")
def isOptionType(tp: Type) = cond(tp.normalize) { case TypeRef(_, OptionClass, List(_)) => true }
def isSomeType(tp: Type) = cond(tp.normalize) { case TypeRef(_, SomeClass, List(_)) => true }
- def isNoneType(tp: Type) = cond(tp.normalize) { case TypeRef(_, NoneClass, List(_)) => true }
+ def isNoneType(tp: Type) = cond(tp.normalize) { case TypeRef(_, NoneModule, List(_)) => true }
def optionType(tp: Type) = typeRef(OptionClass.typeConstructor.prefix, OptionClass, List(tp))
def someType(tp: Type) = typeRef(SomeClass.typeConstructor.prefix, SomeClass, List(tp))
@@ -291,6 +291,7 @@ trait Definitions {
lazy val ProductRootClass: Symbol = getClass("scala.Product")
def Product_productArity = getMember(ProductRootClass, nme.productArity)
def Product_productElement = getMember(ProductRootClass, nme.productElement)
+ // def Product_productElementName = getMember(ProductRootClass, nme.productElementName)
def Product_productPrefix = getMember(ProductRootClass, nme.productPrefix)
def Product_canEqual = getMember(ProductRootClass, nme.canEqual_)
@@ -349,9 +350,15 @@ trait Definitions {
def seqType(arg: Type) = typeRef(SeqClass.typeConstructor.prefix, SeqClass, List(arg))
def arrayType(arg: Type) = typeRef(ArrayClass.typeConstructor.prefix, ArrayClass, List(arg))
+ def ClassType(arg: Type) =
+ if (phase.erasedTypes || forMSIL) ClassClass.tpe
+ else appliedType(ClassClass.tpe, List(arg))
+
//
// .NET backend
//
+
+ lazy val ComparatorClass = getClass("scala.runtime.Comparator")
// System.ValueType
lazy val ValueTypeClass: Symbol = getClass(sn.ValueType)
// System.MulticastDelegate
@@ -385,12 +392,14 @@ trait Definitions {
var Any_toString : Symbol = _
var Any_isInstanceOf: Symbol = _
var Any_asInstanceOf: Symbol = _
+ var Any_## : Symbol = _
// members of class java.lang.{Object, String}
var Object_eq : Symbol = _
var Object_ne : Symbol = _
var Object_== : Symbol = _
var Object_!= : Symbol = _
+ var Object_## : Symbol = _
var Object_synchronized: Symbol = _
lazy val Object_isInstanceOf = newPolyMethod(
ObjectClass, "$isInstanceOf",
@@ -423,18 +432,6 @@ trait Definitions {
lazy val BoxedFloatClass = getClass("java.lang.Float")
lazy val BoxedDoubleClass = getClass("java.lang.Double")
- /** The various ways a boxed primitive might materialize at runtime. */
- def isMaybeBoxed(sym: Symbol) =
- if (forMSIL)
- sym isNonBottomSubClass BoxedNumberClass
- else {
- (sym == ObjectClass) ||
- (sym == SerializableClass) ||
- (sym == ComparableClass) ||
- (sym isNonBottomSubClass BoxedNumberClass) ||
- (sym isNonBottomSubClass BoxedCharacterClass)
- }
-
lazy val BoxedUnitClass = getClass("scala.runtime.BoxedUnit")
lazy val BoxedUnitModule = getModule("scala.runtime.BoxedUnit")
def BoxedUnit_UNIT = getMember(BoxedUnitModule, "UNIT")
@@ -443,6 +440,7 @@ trait Definitions {
// special attributes
lazy val SerializableAttr: Symbol = getClass("scala.serializable")
lazy val DeprecatedAttr: Symbol = getClass("scala.deprecated")
+ lazy val MigrationAnnotationClass: Symbol = getClass("scala.annotation.migration")
lazy val BeanPropertyAttr: Symbol = getClass(sn.BeanProperty)
lazy val BooleanBeanPropertyAttr: Symbol = getClass(sn.BooleanBeanProperty)
@@ -574,31 +572,31 @@ trait Definitions {
private def newTypeParam(owner: Symbol, index: Int): Symbol =
owner.newTypeParameter(NoPosition, "T" + index)
- .setInfo(mkTypeBounds(NothingClass.typeConstructor, AnyClass.typeConstructor))
+ .setInfo(TypeBounds(NothingClass.typeConstructor, AnyClass.typeConstructor))
val boxedClass = new HashMap[Symbol, Symbol]
val boxedModule = new HashMap[Symbol, Symbol]
val unboxMethod = new HashMap[Symbol, Symbol] // Type -> Method
val boxMethod = new HashMap[Symbol, Symbol] // Type -> Method
- def isUnbox(m: Symbol) = (m.name == nme.unbox) && cond(m.tpe) {
- case MethodType(_, restpe) => cond(unboxMethod get restpe.typeSymbol) {
- case Some(`m`) => true
- }
- }
-
- /** Test whether a method symbol is that of a boxing method. */
- def isBox(m: Symbol) = (boxMethod.valuesIterator contains m) && cond(m.tpe) {
- case MethodType(List(arg), _) => cond(boxMethod get arg.tpe.typeSymbol) {
- case Some(`m`) => true
- }
- }
+ def isUnbox(m: Symbol) = unboxMethod.valuesIterator contains m
+ def isBox(m: Symbol) = boxMethod.valuesIterator contains m
val refClass = new HashMap[Symbol, Symbol]
val abbrvTag = new HashMap[Symbol, Char]
- val numericWidth = new HashMap[Symbol, Int]
+ private val numericWeight = new HashMap[Symbol, Int]
+
+ def isNumericSubClass(sub: Symbol, sup: Symbol) =
+ numericWeight get sub match {
+ case Some(w1) =>
+ numericWeight get sup match {
+ case Some(w2) => w2 % w1 == 0
+ case None => false
+ }
+ case None => false
+ }
- private def newValueClass(name: Name, tag: Char, width: Int): Symbol = {
+ private[symtab] def newValueClass(name: Name, tag: Char, weight: Int): Symbol = {
val boxedName = sn.Boxed(name)
val clazz = newClass(ScalaPackageClass, name, anyvalparam) setFlag (ABSTRACT | FINAL)
@@ -606,7 +604,7 @@ trait Definitions {
boxedModule(clazz) = getModule(boxedName)
refClass(clazz) = getClass("scala.runtime." + name + "Ref")
abbrvTag(clazz) = tag
- if (width > 0) numericWidth(clazz) = width
+ if (weight > 0) numericWeight(clazz) = weight
val module = ScalaPackageClass.newModule(NoPosition, name)
ScalaPackageClass.info.decls.enter(module)
@@ -691,7 +689,7 @@ trait Definitions {
def addModuleMethod(clazz: Symbol, name: Name, value: Any) {
val owner = clazz.linkedClassOfClass
- newParameterlessMethod(owner, name, mkConstantType(Constant(value)))
+ newParameterlessMethod(owner, name, ConstantType(Constant(value)))
}
addModuleMethod(ByteClass, "MinValue", java.lang.Byte.MIN_VALUE)
addModuleMethod(ByteClass, "MaxValue", java.lang.Byte.MAX_VALUE)
@@ -729,7 +727,7 @@ trait Definitions {
/** Is symbol a numeric value class? */
def isNumericValueClass(sym: Symbol): Boolean =
- numericWidth contains sym
+ numericWeight contains sym
/** Is symbol a numeric value class? */
def isNumericValueType(tp: Type): Boolean = tp match {
@@ -737,6 +735,7 @@ trait Definitions {
case _ => false
}
+ // todo: reconcile with javaSignature!!!
def signature(tp: Type): String = {
def erasure(tp: Type): Type = tp match {
case st: SubType => erasure(st.supertype)
@@ -744,7 +743,7 @@ trait Definitions {
case _ => tp
}
def flatNameString(sym: Symbol, separator: Char): String =
- if (sym.owner.isPackageClass) sym.fullNameString('.') + (if (sym.isModuleClass) "$" else "")
+ if (sym.owner.isPackageClass) sym.fullName('.') + (if (sym.isModuleClass) "$" else "")
else flatNameString(sym.owner, separator) + "$" + sym.simpleName;
def signature1(etp: Type): String = {
if (etp.typeSymbol == ArrayClass) "[" + signature1(erasure(etp.normalize.typeArgs.head))
@@ -777,13 +776,15 @@ trait Definitions {
Any_equals = newMethod(AnyClass, nme.equals_, anyparam, booltype)
Any_hashCode = newMethod(AnyClass, nme.hashCode_, Nil, inttype)
Any_toString = newMethod(AnyClass, nme.toString_, Nil, stringtype)
+ Any_## = newMethod(AnyClass, nme.HASHHASH, Nil, inttype) setFlag FINAL
Any_isInstanceOf = newPolyMethod(
AnyClass, nme.isInstanceOf_, tparam => booltype) setFlag FINAL
Any_asInstanceOf = newPolyMethod(
AnyClass, nme.asInstanceOf_, tparam => tparam.typeConstructor) setFlag FINAL
- // members of class java.lang.{Object, String}
+ // members of class java.lang.{ Object, String }
+ Object_## = newMethod(ObjectClass, nme.HASHHASH, Nil, inttype) setFlag FINAL
Object_== = newMethod(ObjectClass, nme.EQ, anyrefparam, booltype) setFlag FINAL
Object_!= = newMethod(ObjectClass, nme.NE, anyrefparam, booltype) setFlag FINAL
Object_eq = newMethod(ObjectClass, nme.eq, anyrefparam, booltype) setFlag FINAL
@@ -822,7 +823,7 @@ trait Definitions {
// #2264
var tmp = AnnotationDefaultAttr
- tmp = RepeatedParamClass // force initalization
+ tmp = RepeatedParamClass // force initialization
if (forMSIL) {
val intType = IntClass.typeConstructor
val intParam = List(intType)
@@ -834,9 +835,10 @@ trait Definitions {
// additional methods of Object
newMethod(ObjectClass, "clone", List(), AnyRefClass.typeConstructor)
- newMethod(ObjectClass, "wait", List(), unitType)
- newMethod(ObjectClass, "wait", List(longType), unitType)
- newMethod(ObjectClass, "wait", List(longType, intType), unitType)
+ // wait in Java returns void; on .NET, Wait returns a boolean. By giving wait
+ // `booltype` here, the compiler adds a `drop` after calling it.
+ newMethod(ObjectClass, "wait", List(), booltype)
+ newMethod(ObjectClass, "wait", List(longType), booltype)
newMethod(ObjectClass, "notify", List(), unitType)
newMethod(ObjectClass, "notifyAll", List(), unitType)
@@ -869,7 +871,7 @@ trait Definitions {
var nbScalaCallers: Int = 0
def newScalaCaller(delegateType: Type): Symbol = {
assert(forMSIL, "scalaCallers can only be created if target is .NET")
- // object: reference to object on which to call (scala-)metod
+ // object: reference to object on which to call (scala-)method
val paramTypes: List[Type] = List(ObjectClass.tpe)
val name: String = "$scalaCaller$$" + nbScalaCallers
// tparam => resultType, which is the resultType of PolyType, i.e. the result type after applying the
diff --git a/src/compiler/scala/tools/nsc/symtab/Flags.scala b/src/compiler/scala/tools/nsc/symtab/Flags.scala
index 9901027db6..2978d6cb0a 100644
--- a/src/compiler/scala/tools/nsc/symtab/Flags.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Flags.scala
@@ -7,83 +7,7 @@
package scala.tools.nsc
package symtab
-object Flags {
-
- // modifiers
- final val IMPLICIT = 0x00000200
- final val FINAL = 0x00000020
- final val PRIVATE = 0x00000004
- final val PROTECTED = 0x00000001
-
- final val SEALED = 0x00000400
- final val OVERRIDE = 0x00000002
- final val CASE = 0x00000800
- final val ABSTRACT = 0x00000008 // abstract class, or used in conjunction
- // with abstract override.
- // Note difference to DEFERRED!
-
- final val DEFERRED = 0x00000010 // was `abstract' for members | trait is virtual
- final val METHOD = 0x00000040 // a method
- final val MODULE = 0x00000100 // symbol is module or class implementing a module
- final val INTERFACE = 0x00000080 // symbol is an interface (i.e. a trait which defines only abstract methods)
-
-
- final val MUTABLE = 0x00001000 // symbol is a mutable variable.
- final val PARAM = 0x00002000 // symbol is a (value or type) parameter to a method
- final val PACKAGE = 0x00004000 // symbol is a java package
- // available: 0x00008000
-
- final val COVARIANT = 0x00010000 // symbol is a covariant type variable
- final val CAPTURED = 0x00010000 // variable is accessed from nested function.
- // Set by LambdaLift
- final val BYNAMEPARAM = 0x00010000 // parameter is by name
- final val CONTRAVARIANT = 0x00020000 // symbol is a contravariant type variable
- final val LABEL = 0x00020000 // method symbol is a label. Set by TailCall
- final val INCONSTRUCTOR = 0x00020000 // class symbol is defined in this/superclass
- // constructor.
- final val ABSOVERRIDE = 0x00040000 // combination of abstract & override
- final val LOCAL = 0x00080000 // symbol is local to current class (i.e. private[this] or protected[this]
- // pre: PRIVATE or PROTECTED are also set
- final val JAVA = 0x00100000 // symbol was defined by a Java class
- final val SYNTHETIC = 0x00200000 // symbol is compiler-generated
- final val STABLE = 0x00400000 // functions that are assumed to be stable
- // (typically, access methods for valdefs)
- // or classes that do not contain abstract types.
- final val STATIC = 0x00800000 // static field, method or class
-
- final val CASEACCESSOR = 0x01000000 // symbol is a case parameter (or its accessor)
- final val TRAIT = 0x02000000 // symbol is a trait
- final val DEFAULTPARAM = 0x02000000 // the parameter has a default value
- final val BRIDGE = 0x04000000 // function is a bridge method. Set by Erasure
- final val ACCESSOR = 0x08000000 // a value or variable accessor (getter or setter)
-
- final val SUPERACCESSOR = 0x10000000 // a super accessor
- final val PARAMACCESSOR = 0x20000000 // for value definitions: is an access method
- // for a final val parameter
- // for parameters: is a val parameter
- final val MODULEVAR = 0x40000000 // for variables: is the variable caching a module value
- final val SYNTHETICMETH = 0x40000000 // for methods: synthetic method, but without SYNTHETIC flag
- final val MONOMORPHIC = 0x40000000 // for type symbols: does not have type parameters
- final val LAZY = 0x80000000L // symbol is a lazy val. can't have MUTABLE unless transformed by typer
-
- final val IS_ERROR = 0x100000000L // symbol is an error symbol
- final val OVERLOADED = 0x200000000L // symbol is overloaded
- final val LIFTED = 0x400000000L // class has been lifted out to package level
- // local value has been lifted out to class level
- // todo: make LIFTED = latePRIVATE?
- final val MIXEDIN = 0x800000000L // term member has been mixed in
- final val EXISTENTIAL = 0x800000000L // type is an existential parameter or skolem
-
- final val EXPANDEDNAME = 0x1000000000L // name has been expanded with class suffix
- final val IMPLCLASS = 0x2000000000L // symbol is an implementation class
- final val PRESUPER = 0x2000000000L // value is evaluated before super call
- final val TRANS_FLAG = 0x4000000000L // transient flag guaranteed to be reset
- // after each phase.
-
- final val LOCKED = 0x8000000000L // temporary flag to catch cyclic dependencies
- final val SPECIALIZED = 0x10000000000L// symbol is a generated specialized member
- final val DEFAULTINIT = 0x20000000000L// symbol is a generated specialized member
- final val VBRIDGE = 0x40000000000L// symbol is a varargs bridge
+object Flags extends reflect.generic.Flags {
final val InitialFlags = 0x0001FFFFFFFFFFFFL // flags that are enabled from phase 1.
final val LateFlags = 0x00FE000000000000L // flags that override flags in 0x1FC.
@@ -108,118 +32,6 @@ object Flags {
final val notOVERRIDE = (OVERRIDE: Long) << AntiShift
final val notMETHOD = (METHOD: Long) << AntiShift
- // The flags from 0x001 to 0x800 are different in the raw flags
- // and in the pickled format.
-
- private final val IMPLICIT_PKL = 0x00000001
- private final val FINAL_PKL = 0x00000002
- private final val PRIVATE_PKL = 0x00000004
- private final val PROTECTED_PKL = 0x00000008
-
- private final val SEALED_PKL = 0x00000010
- private final val OVERRIDE_PKL = 0x00000020
- private final val CASE_PKL = 0x00000040
- private final val ABSTRACT_PKL = 0x00000080
-
- private final val DEFERRED_PKL = 0x00000100
- private final val METHOD_PKL = 0x00000200
- private final val MODULE_PKL = 0x00000400
- private final val INTERFACE_PKL = 0x00000800
-
- private final val PKL_MASK = 0x00000FFF
-
-
- private val r2p = {
- def rawFlagsToPickledAux(flags:Int) = {
- var pflags=0
- if ((flags & IMPLICIT )!=0) pflags|=IMPLICIT_PKL
- if ((flags & FINAL )!=0) pflags|=FINAL_PKL
- if ((flags & PRIVATE )!=0) pflags|=PRIVATE_PKL
- if ((flags & PROTECTED)!=0) pflags|=PROTECTED_PKL
- if ((flags & SEALED )!=0) pflags|=SEALED_PKL
- if ((flags & OVERRIDE )!=0) pflags|=OVERRIDE_PKL
- if ((flags & CASE )!=0) pflags|=CASE_PKL
- if ((flags & ABSTRACT )!=0) pflags|=ABSTRACT_PKL
- if ((flags & DEFERRED )!=0) pflags|=DEFERRED_PKL
- if ((flags & METHOD )!=0) pflags|=METHOD_PKL
- if ((flags & MODULE )!=0) pflags|=MODULE_PKL
- if ((flags & INTERFACE)!=0) pflags|=INTERFACE_PKL
- pflags
- }
- val v=new Array[Int](PKL_MASK+1)
- var i=0
- while (i<=PKL_MASK) {
- v(i)=rawFlagsToPickledAux(i)
- i+=1
- }
- v
- }
-
- private val p2r = {
- def pickledToRawFlagsAux(pflags:Int) = {
- var flags=0
- if ((pflags & IMPLICIT_PKL )!=0) flags|=IMPLICIT
- if ((pflags & FINAL_PKL )!=0) flags|=FINAL
- if ((pflags & PRIVATE_PKL )!=0) flags|=PRIVATE
- if ((pflags & PROTECTED_PKL)!=0) flags|=PROTECTED
- if ((pflags & SEALED_PKL )!=0) flags|=SEALED
- if ((pflags & OVERRIDE_PKL )!=0) flags|=OVERRIDE
- if ((pflags & CASE_PKL )!=0) flags|=CASE
- if ((pflags & ABSTRACT_PKL )!=0) flags|=ABSTRACT
- if ((pflags & DEFERRED_PKL )!=0) flags|=DEFERRED
- if ((pflags & METHOD_PKL )!=0) flags|=METHOD
- if ((pflags & MODULE_PKL )!=0) flags|=MODULE
- if ((pflags & INTERFACE_PKL)!=0) flags|=INTERFACE
- flags
- }
- val v=new Array[Int](PKL_MASK+1)
- var i=0
- while (i<=PKL_MASK) {
- v(i)=pickledToRawFlagsAux(i)
- i+=1
- }
- v
- }
-
- def rawFlagsToPickled(flags:Long):Long =
- (flags & ~PKL_MASK) | r2p(flags.toInt & PKL_MASK)
-
- def pickledToRawFlags(pflags:Long):Long =
- (pflags & ~PKL_MASK) | p2r(pflags.toInt & PKL_MASK)
-
- // List of the raw flags, in pickled order
- private val pickledListOrder = {
- def findBit(m:Long):Int = {
- var mask=m
- var i=0
- while (i <= 62) {
- if ((mask&1) == 1L) return i
- mask >>= 1
- i += 1
- }
- throw new FatalError("Internal error: mask is zero")
- }
- val v=new Array[Long](63)
- v(findBit(IMPLICIT_PKL ))=IMPLICIT
- v(findBit(FINAL_PKL ))=FINAL
- v(findBit(PRIVATE_PKL ))=PRIVATE
- v(findBit(PROTECTED_PKL))=PROTECTED
- v(findBit(SEALED_PKL ))=SEALED
- v(findBit(OVERRIDE_PKL ))=OVERRIDE
- v(findBit(CASE_PKL ))=CASE
- v(findBit(ABSTRACT_PKL ))=ABSTRACT
- v(findBit(DEFERRED_PKL ))=DEFERRED
- v(findBit(METHOD_PKL ))=METHOD
- v(findBit(MODULE_PKL ))=MODULE
- v(findBit(INTERFACE_PKL))=INTERFACE
- var i=findBit(PKL_MASK+1)
- while (i <= 62) {
- v(i)=1L << i
- i += 1
- }
- v.toList
- }
-
// masks
/** This flags can be set when class or module symbol is first created. */
final val TopLevelCreationFlags: Long =
@@ -241,10 +53,9 @@ object Flags {
final val FieldFlags: Long =
MUTABLE | CASEACCESSOR | PARAMACCESSOR | STATIC | FINAL | PRESUPER | LAZY
- final val AccessFlags: Long = PRIVATE | PROTECTED
+ final val AccessFlags: Long = PRIVATE | PROTECTED | LOCAL
final val VARIANCES = COVARIANT | CONTRAVARIANT
final val ConstrFlags: Long = JAVA
- final val PickledFlags: Long = 0xFFFFFFFFL
/** Module flags inherited by their module-class */
final val ModuleToClassFlags: Long = AccessFlags | MODULE | PACKAGE | CASE | SYNTHETIC | JAVA
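With the raw flag constants and the raw-to-pickled permutation moved into reflect.generic.Flags, what remains here are the derived masks; note that AccessFlags now also covers LOCAL, so private[this]/protected[this] members count as access-flagged. The sketch below is a generic illustration of how such bit-mask tests compose; the flag values are made up for the example and are not the compiler's encodings.

    // Generic bit-mask sketch; illustrative flag values only.
    object FlagMaskSketch {
      final val PRIVATE   = 0x1L
      final val PROTECTED = 0x2L
      final val LOCAL     = 0x4L

      final val AccessFlags: Long = PRIVATE | PROTECTED | LOCAL

      def hasFlag(flags: Long, mask: Long): Boolean     = (flags & mask) != 0L
      def hasAllFlags(flags: Long, mask: Long): Boolean = (flags & mask) == mask

      def main(args: Array[String]): Unit = {
        val privateThis = PRIVATE | LOCAL           // roughly: private[this]
        assert(hasFlag(privateThis, AccessFlags))   // an access-related flag is present
        assert(hasAllFlags(privateThis, PRIVATE | LOCAL))
        assert(!hasFlag(0L, AccessFlags))           // public: no access flags set
      }
    }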
diff --git a/src/compiler/scala/tools/nsc/symtab/Names.scala b/src/compiler/scala/tools/nsc/symtab/Names.scala
index 5f5a220b8a..b0e6c5250a 100644
--- a/src/compiler/scala/tools/nsc/symtab/Names.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Names.scala
@@ -7,7 +7,7 @@
package scala.tools.nsc
package symtab
-import scala.util.NameTransformer
+import scala.reflect.NameTransformer
import scala.io.Codec
import java.security.MessageDigest
@@ -16,7 +16,7 @@ import java.security.MessageDigest
* @author Martin Odersky
* @version 1.0, 05/02/2005
*/
-class Names {
+trait Names extends reflect.generic.Names {
// Operations -------------------------------------------------------------
@@ -164,6 +164,9 @@ class Names {
def newTypeName(bs: Array[Byte], offset: Int, len: Int): Name =
newTermName(bs, offset, len).toTypeName
+ def mkTermName(name: Name) = name.toTermName
+ def mkTypeName(name: Name) = name.toTypeName
+
def nameChars: Array[Char] = chrs
implicit def view(s: String): Name = newTermName(s)
@@ -390,6 +393,8 @@ class Names {
def decode: String = (
NameTransformer.decode(toString()) +
(if (nameDebug && isTypeName) "!" else ""))//debug
+
+ def isOperatorName: Boolean = decode != toString
}
private class TermName(index: Int, len: Int, hash: Int) extends Name(index, len) {
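isOperatorName relies on the fact that operator identifiers are stored in encoded form ($plus, $eq$eq, ...), so a name is an operator name exactly when decoding changes it. The same check can be reproduced on plain strings with the public scala.reflect.NameTransformer (assumed to be on the classpath):

    import scala.reflect.NameTransformer

    // Reproduces the `decode != toString` test on plain strings.
    object OperatorNameSketch {
      def isOperatorName(encoded: String): Boolean =
        NameTransformer.decode(encoded) != encoded

      def main(args: Array[String]): Unit = {
        assert(NameTransformer.encode("==") == "$eq$eq")
        assert(isOperatorName("$eq$eq"))     // decodes to "==", so it is an operator name
        assert(!isOperatorName("toString"))  // decodes to itself
      }
    }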
diff --git a/src/compiler/scala/tools/nsc/symtab/Positions.scala b/src/compiler/scala/tools/nsc/symtab/Positions.scala
index e096d3b5e3..58b9164988 100644
--- a/src/compiler/scala/tools/nsc/symtab/Positions.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Positions.scala
@@ -27,10 +27,13 @@ self: scala.tools.nsc.symtab.SymbolTable =>
/** Ensure that given tree has no positions that overlap with
* any of the positions of `others`. This is done by
- * shortening the range or assinging TransparentPositions
+ * shortening the range or assigning TransparentPositions
* to some of the nodes in `tree`.
*/
def ensureNonOverlapping(tree: Tree, others: List[Tree]) {}
def validatePositions(tree: Tree) {}
+
+ type Position = scala.tools.nsc.util.Position
+ val NoPosition = scala.tools.nsc.util.NoPosition
}
diff --git a/src/compiler/scala/tools/nsc/symtab/Scopes.scala b/src/compiler/scala/tools/nsc/symtab/Scopes.scala
index dcd9e76b62..f305f20d2a 100644
--- a/src/compiler/scala/tools/nsc/symtab/Scopes.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Scopes.scala
@@ -44,11 +44,11 @@ trait Scopes {
e
}
- class Scope(initElems: ScopeEntry) extends Iterable[Symbol] {
+ class Scope(initElems: ScopeEntry) extends AbsScope {
var elems: ScopeEntry = initElems
- /** The number of times this scope is neted in another
+ /** The number of times this scope is nested in another
*/
private var nestinglevel = 0
@@ -275,10 +275,39 @@ trait Scopes {
elemsCache
}
- /** Return all symbols as an interator in the order they were entered in this scope.
+ /** Return the nesting level of this scope, i.e. the number of times this scope
+ * was nested in another */
+ def nestingLevel = nestinglevel
+
+ /** Return all symbols as an iterator in the order they were entered in this scope.
*/
def iterator: Iterator[Symbol] = toList.iterator
+/*
+ /** Does this scope contain an entry for `sym`?
+ */
+ def contains(sym: Symbol): Boolean = lookupAll(sym.name) contains sym
+
+ /** A scope that contains all symbols of this scope and that also contains `sym`.
+ */
+ def +(sym: Symbol): Scope =
+ if (contains(sym)) this
+ else {
+ val result = cloneScope
+ result enter sym
+ result
+ }
+
+ /** A scope that contains all symbols of this scope except `sym`.
+ */
+ def -(sym: Symbol): Scope =
+ if (!contains(sym)) this
+ else {
+ val result = cloneScope
+ result unlink sym
+ result
+ }
+*/
override def foreach[U](p: Symbol => U): Unit = toList foreach p
override def filter(p: Symbol => Boolean): Scope =
@@ -287,18 +316,17 @@ trait Scopes {
override def mkString(start: String, sep: String, end: String) =
toList.map(_.defString).mkString(start, sep, end)
- override def toString(): String = mkString("{\n ", ";\n ", "\n}")
+ override def toString(): String = mkString("Scope{\n ", ";\n ", "\n}")
- /** Return the nesting level of this scope, i.e. the number of times this scope
- * was nested in another */
- def nestingLevel = nestinglevel
}
+ def newScope: Scope = new Scope
+
/** The empty scope (immutable).
*/
object EmptyScope extends Scope {
override def enter(e: ScopeEntry) {
- throw new Error("EmptyScope.enter")
+ abort("EmptyScope.enter")
}
}
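The commented-out + and - operations sketch a persistent-style API on top of the mutable Scope: if the symbol is already present, return this unchanged; otherwise clone the scope and mutate only the clone. A standalone toy of that pattern follows; ToyScope and its members are hypothetical stand-ins for Scope, cloneScope, enter and unlink.

    // Toy stand-in for Scope's clone-on-add idea; names are hypothetical.
    class ToyScope private (private var elems: List[String]) {
      def this() = this(Nil)
      def contains(sym: String): Boolean = elems contains sym
      def enter(sym: String): Unit       = elems ::= sym
      def unlink(sym: String): Unit      = elems = elems filterNot (_ == sym)
      def cloneScope: ToyScope           = new ToyScope(elems)

      def +(sym: String): ToyScope =
        if (contains(sym)) this
        else { val result = cloneScope; result enter sym; result }

      def -(sym: String): ToyScope =
        if (!contains(sym)) this
        else { val result = cloneScope; result unlink sym; result }

      override def toString = elems.mkString("ToyScope{", ", ", "}")
    }

    object ToyScopeDemo {
      def main(args: Array[String]): Unit = {
        val s0 = new ToyScope
        val s1 = s0 + "x"
        assert((s1 + "x") eq s1)      // already present: the same instance is returned
        assert(!(s0 contains "x"))    // the original scope is left untouched
      }
    }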
diff --git a/src/compiler/scala/tools/nsc/symtab/StdNames.scala b/src/compiler/scala/tools/nsc/symtab/StdNames.scala
index e762448462..5c7e7925ea 100644
--- a/src/compiler/scala/tools/nsc/symtab/StdNames.scala
+++ b/src/compiler/scala/tools/nsc/symtab/StdNames.scala
@@ -7,12 +7,12 @@
package scala.tools.nsc
package symtab
-import scala.util.NameTransformer
+import scala.reflect.NameTransformer
+import util.Chars.isOperatorPart
-trait StdNames {
- self: SymbolTable =>
+trait StdNames extends reflect.generic.StdNames { self: SymbolTable =>
- object nme {
+ object nme extends StandardNames {
// Scala keywords; enter them first to minimize scanner.maxKey
val ABSTRACTkw = newTermName("abstract")
@@ -73,7 +73,6 @@ trait StdNames {
val LOCALDUMMY_PREFIX_STRING = "<local "
val SUPER_PREFIX_STRING = "super$"
- val EXPAND_SEPARATOR_STRING = "$$"
val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
val TUPLE_FIELD_PREFIX_STRING = "_"
val CHECK_IF_REFUTABLE_STRING = "check$ifrefutable$"
@@ -88,7 +87,7 @@ trait StdNames {
def LOCAL(clazz: Symbol) = newTermName(LOCALDUMMY_PREFIX_STRING + clazz.name+">")
def TUPLE_FIELD(index: Int) = newTermName(TUPLE_FIELD_PREFIX_STRING + index)
- val LOCAL_SUFFIX = newTermName(" ")
+ val LOCAL_SUFFIX = newTermName(LOCAL_SUFFIX_STRING)
val SETTER_SUFFIX = encode("_=")
val IMPL_CLASS_SUFFIX = newTermName("$class")
val MODULE_SUFFIX = newTermName("$module")
@@ -103,22 +102,16 @@ trait StdNames {
def isTraitSetterName(name: Name) = isSetterName(name) && name.pos(TRAIT_SETTER_SEPARATOR_STRING) < name.length
def isOpAssignmentName(name: Name) =
name(name.length - 1) == '=' &&
- isOperatorCharacter(name(0)) &&
+ isOperatorPart(name(0)) &&
name(0) != '=' && name != NEraw && name != LEraw && name != GEraw
- def isOperatorCharacter(c: Char) = c match {
- case '~' | '!' | '@' | '#' | '%' |
- '^' | '*' | '+' | '-' | '<' |
- '>' | '?' | ':' | '=' | '&' |
- '|' | '\\'| '/' => true
- case _ =>
- val chtp = Character.getType(c)
- chtp == Character.MATH_SYMBOL.toInt || chtp == Character.OTHER_SYMBOL.toInt
- }
+ /** The expanded setter name of `name' relative to the class `base'
+ */
+ def expandedSetterName(name: Name, base: Symbol): Name =
+ expandedName(name, base, separator = TRAIT_SETTER_SEPARATOR_STRING)
/** If `name' is an expandedName name, the original name.
* Otherwise `name' itself.
- * @see Symbol.expandedName
*/
def originalName(name: Name): Name = {
var i = name.length
@@ -183,19 +176,11 @@ trait StdNames {
val LOCALCHILD = newTypeName("<local child>")
val NOSYMBOL = newTermName("<none>")
- val EMPTY = newTermName("")
val ANYNAME = newTermName("<anyname>")
val WILDCARD = newTermName("_")
val WILDCARD_STAR = newTermName("_*")
- val ANON_CLASS_NAME = newTermName("$anon")
- val ANON_FUN_NAME = newTermName("$anonfun")
- val REFINE_CLASS_NAME = newTermName("<refinement>")
- val EMPTY_PACKAGE_NAME = newTermName("<empty>")
- val IMPORT = newTermName("<import>")
val STAR = newTermName("*")
- val ROOT = newTermName("<root>")
- val ROOTPKG = newTermName("_root_")
val REPEATED_PARAM_CLASS_NAME = newTermName("<repeated>")
val JAVA_REPEATED_PARAM_CLASS_NAME = newTermName("<repeated...>")
val BYNAME_PARAM_CLASS_NAME = newTermName("<byname>")
@@ -220,6 +205,7 @@ trait StdNames {
val PERCENT = encode("%")
val EQL = encode("=")
val USCOREEQL = encode("_=")
+ val HASHHASH = encode("##")
val Nothing = newTermName("Nothing")
val Null = newTermName("Null")
@@ -268,6 +254,7 @@ trait StdNames {
val assume_ = newTermName("assume")
val asInstanceOf_ = newTermName("asInstanceOf")
val box = newTermName("box")
+ val bytes = newTermName("bytes")
val canEqual_ = newTermName("canEqual")
val checkInitialized = newTermName("checkInitialized")
val classOf = newTermName("classOf")
@@ -295,10 +282,12 @@ trait StdNames {
val getCause = newTermName("getCause")
val getClass_ = newTermName("getClass")
val getMethod_ = newTermName("getMethod")
+ val hash_ = newTermName("hash")
val hashCode_ = newTermName("hashCode")
val hasNext = newTermName("hasNext")
val head = newTermName("head")
val invoke_ = newTermName("invoke")
+ val isArray = newTermName("isArray")
val isInstanceOf_ = newTermName("isInstanceOf")
val isDefinedAt = newTermName("isDefinedAt")
val isEmpty = newTermName("isEmpty")
@@ -320,6 +309,7 @@ trait StdNames {
val print = newTermName("print")
val productArity = newTermName("productArity")
val productElement = newTermName("productElement")
+ val productElementName = newTermName("productElementName")
val productPrefix = newTermName("productPrefix")
val readResolve = newTermName("readResolve")
val sameElements = newTermName("sameElements")
@@ -376,6 +366,7 @@ trait StdNames {
val NEraw = newTermName("!=")
val LEraw = newTermName("<=")
val GEraw = newTermName(">=")
+ val DOLLARraw = newTermName("$")
// value-conversion methods
val toByte = newTermName("toByte")
@@ -418,7 +409,7 @@ trait StdNames {
val String : Name
val Throwable : Name
val NPException : Name // NullPointerException
- val NLRException : Name = newTermName("scala.runtime.NonLocalReturnException")
+ val NLRControl : Name = newTermName("scala.runtime.NonLocalReturnControl")
val ValueType : Name
val Serializable : Name
val BeanProperty : Name
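The rewritten isOpAssignmentName now delegates the character test to Chars.isOperatorPart while still carving out ==, !=, <= and >= (plus anything starting with '='), so names like += qualify but comparison operators do not. A plain-string rendering of that predicate is sketched below; its isOperatorPart stand-in only covers the ASCII operator characters, which is an approximation of the real one.

    // Plain-string sketch of isOpAssignmentName; ASCII-only operator characters.
    object OpAssignmentSketch {
      private val opChars = "~!@#%^*+-<>?:=&|\\/".toSet
      def isOperatorPart(c: Char): Boolean = opChars(c)

      def isOpAssignmentName(name: String): Boolean =
        name.nonEmpty &&
        name.last == '=' &&
        isOperatorPart(name.head) &&
        name.head != '=' &&
        !Set("!=", "<=", ">=").contains(name)

      def main(args: Array[String]): Unit = {
        assert(isOpAssignmentName("+="))
        assert(isOpAssignmentName("::="))
        assert(!isOpAssignmentName("=="))   // excluded because it starts with '='
        assert(!isOpAssignmentName("<="))   // explicitly carved out
        assert(!isOpAssignmentName("foo"))
      }
    }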
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 3daa4e8ac2..947f5a8f88 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -11,10 +11,10 @@ import java.io.{File, IOException}
import ch.epfl.lamp.compiler.msil.{Type => MSILType, Attribute => MSILAttribute}
-import scala.collection.mutable.{HashMap, HashSet}
+import scala.collection.mutable.{HashMap, HashSet, ListBuffer}
import scala.compat.Platform.currentTime
import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.util.{Position, NoPosition, ClassPath, ClassRep, JavaClassPath, MsilClassPath}
+import scala.tools.nsc.util.{ ClassPath, JavaClassPath }
import classfile.ClassfileParser
import Flags._
@@ -64,7 +64,7 @@ abstract class SymbolLoaders {
informTime("loaded " + description, start)
ok = true
setSource(root)
- setSource(root.linkedSym) // module -> class, class -> module
+ setSource(root.companionSymbol) // module -> class, class -> module
} catch {
case ex: IOException =>
ok = false
@@ -75,7 +75,7 @@ abstract class SymbolLoaders {
else "error while loading " + root.name + ", " + msg);
}
initRoot(root)
- if (!root.isPackageClass) initRoot(root.linkedSym)
+ if (!root.isPackageClass) initRoot(root.companionSymbol)
}
override def load(root: Symbol) { complete(root) }
@@ -110,7 +110,7 @@ abstract class SymbolLoaders {
def enterClassAndModule(root: Symbol, name: String, completer: SymbolLoader) {
val owner = if (root.isRoot) definitions.EmptyPackageClass else root
val className = newTermName(name)
- assert(owner.info.decls.lookup(name) == NoSymbol, owner.fullNameString + "." + name)
+ assert(owner.info.decls.lookup(name) == NoSymbol, owner.fullName + "." + name)
val clazz = owner.newClass(NoPosition, name.toTypeName)
val module = owner.newModule(NoPosition, name)
clazz setInfo completer
@@ -118,12 +118,12 @@ abstract class SymbolLoaders {
module.moduleClass setInfo moduleClassLoader
owner.info.decls enter clazz
owner.info.decls enter module
- assert(clazz.linkedModuleOfClass == module, module)
- assert(module.linkedClassOfModule == clazz, clazz)
+ assert(clazz.companionModule == module, module)
+ assert(module.companionClass == clazz, clazz)
}
/**
- * Tells wether a class with both a binary and a source representation
+ * Tells whether a class with both a binary and a source representation
* (found in classpath and in sourcepath) should be re-compiled. Behaves
* similar to javac, i.e. if the source file is newer than the classfile,

* a re-compile is triggered.
@@ -131,12 +131,12 @@ abstract class SymbolLoaders {
protected def needCompile(bin: T, src: AbstractFile): Boolean
/**
- * Tells wether a class should be loaded and entered into the package
+ * Tells whether a class should be loaded and entered into the package
* scope. On .NET, this method returns `false' for all synthetic classes
* (anonymous classes, implementation classes, module classes), their
* symtab is encoded in the pickle of another class.
*/
- protected def doLoad(cls: ClassRep[T]): Boolean
+ protected def doLoad(cls: classpath.AnyClassRep): Boolean
protected def newClassLoader(bin: T): SymbolLoader
@@ -197,12 +197,11 @@ abstract class SymbolLoaders {
}
}
-
class JavaPackageLoader(classpath: ClassPath[AbstractFile]) extends PackageLoader(classpath) {
protected def needCompile(bin: AbstractFile, src: AbstractFile) =
(src.lastModified >= bin.lastModified)
- protected def doLoad(cls: ClassRep[AbstractFile]) = true
+ protected def doLoad(cls: classpath.AnyClassRep) = true
protected def newClassLoader(bin: AbstractFile) =
new ClassfileLoader(bin)
@@ -215,7 +214,7 @@ abstract class SymbolLoaders {
protected def needCompile(bin: MSILType, src: AbstractFile) =
false // always use compiled file on .net
- protected def doLoad(cls: ClassRep[MSILType]) = {
+ protected def doLoad(cls: classpath.AnyClassRep) = {
if (cls.binary.isDefined) {
val typ = cls.binary.get
if (typ.IsDefined(clrTypes.SCALA_SYMTAB_ATTR, false)) {
@@ -276,4 +275,8 @@ abstract class SymbolLoaders {
val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
if (global.forMSIL) init()
}
+
+ /** Used by the classfile parser to avoid cycles. */
+ var parentsLevel = 0
+ var pendingLoadActions: List[() => Unit] = Nil
}
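parentsLevel and pendingLoadActions suggest a re-entrancy guard for the classfile parser: while parent types are being loaded (level > 0), follow-up work is queued instead of executed, and the queue is drained once the outermost load returns. The sketch below is a guess at that deferral pattern; the method names are illustrative, not the parser's actual entry points.

    // Minimal sketch of a "defer while nested" pattern like the one hinted at by
    // parentsLevel / pendingLoadActions. Names and structure are illustrative.
    object DeferredLoadSketch {
      private var parentsLevel = 0
      private var pendingLoadActions: List[() => Unit] = Nil

      def whileLoadingParents[T](body: => T): T = {
        parentsLevel += 1
        try body
        finally {
          parentsLevel -= 1
          if (parentsLevel == 0) {
            val actions = pendingLoadActions.reverse
            pendingLoadActions = Nil
            actions.foreach(a => a())   // run deferred work outside the cycle
          }
        }
      }

      def laterIfNested(action: () => Unit): Unit =
        if (parentsLevel > 0) pendingLoadActions ::= action
        else action()

      def main(args: Array[String]): Unit =
        whileLoadingParents {
          laterIfNested(() => println("ran after parents finished"))
          println("loading parents...")
        }
    }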
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
index 2a70c55cd6..75902568fa 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala
@@ -6,28 +6,33 @@
package scala.tools.nsc
package symtab
-import ast.{Trees, DocComments}
+
+import ast.{Trees, TreePrinters, DocComments}
import util._
-abstract class SymbolTable extends Names
+abstract class SymbolTable extends reflect.generic.Universe
+ with Names
with Symbols
with Types
with Scopes
with Definitions
- with Constants
+ with reflect.generic.Constants
with BaseTypeSeqs
with InfoTransformers
with StdNames
with AnnotationInfos
with AnnotationCheckers
with Trees
+ with TreePrinters
with Positions
with DocComments
{
def settings: Settings
def rootLoader: LazyType
def log(msg: AnyRef)
+ def abort(msg: String) = throw new Error(msg)
+ def abort() = throw new Error()
/** Are we compiling for Java SE ? */
def forJVM: Boolean
diff --git a/src/compiler/scala/tools/nsc/symtab/Symbols.scala b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
index 5ee7409cc7..f50ace8426 100644
--- a/src/compiler/scala/tools/nsc/symtab/Symbols.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
@@ -17,15 +17,13 @@ import Flags._
//todo: get rid of MONOMORPHIC flag
-trait Symbols {
- self: SymbolTable =>
+trait Symbols extends reflect.generic.Symbols { self: SymbolTable =>
import definitions._
private var ids = 0
def symbolCount = ids // statistics
val emptySymbolArray = new Array[Symbol](0)
- val emptySymbolSet = Set.empty[Symbol]
/** Used for deciding in the IDE whether we can interrupt the compiler */
protected var activeLocks = 0
@@ -51,7 +49,7 @@ trait Symbols {
}
*/
/** The class for all symbols */
- abstract class Symbol(initOwner: Symbol, initPos: Position, initName: Name) {
+ abstract class Symbol(initOwner: Symbol, initPos: Position, initName: Name) extends AbsSymbol {
var rawowner = initOwner
var rawname = initName
@@ -102,14 +100,14 @@ trait Symbols {
private var rawannots: List[AnnotationInfoBase] = Nil
- /* Used in namer to check wether annotations were already assigned or not */
+ /* Used in namer to check whether annotations were already assigned or not */
def rawAnnotations:List[AnnotationInfoBase] = rawannots
/** After the typer phase (before, look at the definition's Modifiers), contains
* the annotations attached to member a definition (class, method, type, field).
*/
def annotations: List[AnnotationInfo] = {
- // .initialize: the type completer o f the symbol parses the annotations,
+ // .initialize: the type completer of the symbol parses the annotations,
// see "def typeSig" in Namers
val annots1 = initialize.rawannots map {
case LazyAnnotationInfo(annot) => annot()
@@ -124,8 +122,9 @@ trait Symbols {
this
}
- def addAnnotation(annot: AnnotationInfo): this.type =
+ override def addAnnotation(annot: AnnotationInfo) {
setAnnotations(annot :: this.rawannots)
+ }
/** Does this symbol have an annotation of the given class? */
def hasAnnotation(cls: Symbol) =
@@ -134,6 +133,14 @@ trait Symbols {
def getAnnotation(cls: Symbol): Option[AnnotationInfo] =
annotations find (_.atp.typeSymbol == cls)
+ /** Finds the requested annotation and returns Some(Tree) containing
+ * the argument at position 'index', or None if either the annotation
+ * or the index does not exist.
+ */
+ private def getAnnotationArg(cls: Symbol, index: Int) =
+ for (AnnotationInfo(_, args, _) <- getAnnotation(cls) ; if args.size > index) yield
+ args(index)
+
/** Remove all annotations matching the given class. */
def removeAnnotation(cls: Symbol): Unit =
setAnnotations(annotations filterNot (_.atp.typeSymbol == cls))
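getAnnotationArg combines the Option from getAnnotation with a guard on the argument index inside one for-comprehension, so a missing annotation and an out-of-range index both collapse to None. The same shape on plain data is shown below; ToyAnnotation is a hypothetical stand-in for AnnotationInfo.

    // Stand-alone rendering of the getAnnotationArg shape.
    object AnnotationArgSketch {
      case class ToyAnnotation(name: String, args: List[String])

      def getAnnotation(annots: List[ToyAnnotation], name: String): Option[ToyAnnotation] =
        annots find (_.name == name)

      def getAnnotationArg(annots: List[ToyAnnotation], name: String, index: Int): Option[String] =
        for (ToyAnnotation(_, args) <- getAnnotation(annots, name); if args.size > index)
          yield args(index)

      def main(args: Array[String]): Unit = {
        val annots = List(ToyAnnotation("deprecated", List("use foo instead")))
        assert(getAnnotationArg(annots, "deprecated", 0) == Some("use foo instead"))
        assert(getAnnotationArg(annots, "deprecated", 3) == None)   // index out of range
        assert(getAnnotationArg(annots, "migration", 0) == None)    // annotation missing
      }
    }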
@@ -157,12 +164,16 @@ trait Symbols {
* Java protected: PROTECTED flag set, privateWithin == enclosing package
* Java public: no flag set, privateWithin == NoSymbol
*/
- var privateWithin: Symbol = _
+ private[this] var _privateWithin: Symbol = _
+ def privateWithin = _privateWithin
+ override def privateWithin_=(sym: Symbol) { _privateWithin = sym }
// Creators -------------------------------------------------------------------
final def newValue(pos: Position, name: Name) =
new TermSymbol(this, pos, name)
+ final def newValue(name: Name, pos: Position = NoPosition) =
+ new TermSymbol(this, pos, name)
final def newVariable(pos: Position, name: Name) =
newValue(pos, name).setFlag(MUTABLE)
final def newValueParameter(pos: Position, name: Name) =
@@ -171,7 +182,9 @@ trait Symbols {
final def newLocalDummy(pos: Position) =
newValue(pos, nme.LOCAL(this)).setInfo(NoType)
final def newMethod(pos: Position, name: Name) =
- newValue(pos, name).setFlag(METHOD)
+ new MethodSymbol(this, pos, name).setFlag(METHOD)
+ final def newMethod(name: Name, pos: Position = NoPosition) =
+ new MethodSymbol(this, pos, name).setFlag(METHOD)
final def newLabel(pos: Position, name: Name) =
newMethod(pos, name).setFlag(LABEL)
final def newConstructor(pos: Position) =
@@ -179,6 +192,9 @@ trait Symbols {
final def newModule(pos: Position, name: Name, clazz: ClassSymbol) =
new ModuleSymbol(this, pos, name).setFlag(MODULE | FINAL)
.setModuleClass(clazz)
+ final def newModule(name: Name, clazz: Symbol, pos: Position = NoPosition) =
+ new ModuleSymbol(this, pos, name).setFlag(MODULE | FINAL)
+ .setModuleClass(clazz.asInstanceOf[ClassSymbol])
final def newModule(pos: Position, name: Name) = {
val m = new ModuleSymbol(this, pos, name).setFlag(MODULE | FINAL)
m.setModuleClass(new ModuleClassSymbol(m))
@@ -234,11 +250,15 @@ trait Symbols {
*/
final def newAliasType(pos: Position, name: Name) =
new TypeSymbol(this, pos, name)
+ final def newAliasType(name: Name, pos: Position = NoPosition) =
+ new TypeSymbol(this, pos, name)
/** Symbol of an abstract type type T >: ... <: ...
*/
final def newAbstractType(pos: Position, name: Name) =
new TypeSymbol(this, pos, name).setFlag(DEFERRED)
+ final def newAbstractType(name: Name, pos: Position = NoPosition) =
+ new TypeSymbol(this, pos, name).setFlag(DEFERRED)
/** Symbol of a type parameter
*/
@@ -291,9 +311,13 @@ trait Symbols {
final def newClass(pos: Position, name: Name) =
new ClassSymbol(this, pos, name)
+ final def newClass(name: Name, pos: Position = NoPosition) =
+ new ClassSymbol(this, pos, name)
final def newModuleClass(pos: Position, name: Name) =
new ModuleClassSymbol(this, pos, name)
+ final def newModuleClass(name: Name, pos: Position = NoPosition) =
+ new ModuleClassSymbol(this, pos, name)
final def newAnonymousClass(pos: Position) =
newClass(pos, nme.ANON_CLASS_NAME.toTypeName)
@@ -362,18 +386,13 @@ trait Symbols {
// Tests ----------------------------------------------------------------------
- def isTerm = false //to be overridden
- def isType = false //to be overridden
- def isClass = false //to be overridden
- def isTypeMember = false //to be overridden todo: rename, it's something
- // whose definition starts with `type', i.e. a type
- // which is not a class.
- def isAliasType = false //to be overridden
- def isAbstractType = false //to be overridden
- def isSkolem = false //to be overridden
+ /** Is this symbol a type but not a class? */
+ def isNonClassType = false
- /** Term symbols with the exception of static parts of Java classes and packages */
- final def isValue = isTerm && !(isModule && hasFlag(PACKAGE | JAVA))
+ /** Term symbols with the exception of static parts of Java classes and packages
+ * and the faux companion objects of primitives. (See tickets #1392 and #3123.)
+ */
+ final def isValue = isTerm && !(isModule && (hasFlag(PACKAGE | JAVA) || isValueClass(companionClass)))
final def isVariable = isTerm && hasFlag(MUTABLE) && !isMethod
@@ -388,29 +407,23 @@ trait Symbols {
final def isValueParameter = isTerm && hasFlag(PARAM)
final def isLocalDummy = isTerm && nme.isLocalDummyName(name)
- final def isMethod = isTerm && hasFlag(METHOD)
- final def isSourceMethod = isTerm && (flags & (METHOD | STABLE)) == METHOD.toLong // ???
final def isLabel = isMethod && !hasFlag(ACCESSOR) && hasFlag(LABEL)
final def isInitializedToDefault = !isType && (getFlag(DEFAULTINIT | ACCESSOR) == (DEFAULTINIT | ACCESSOR))
final def isClassConstructor = isTerm && (name == nme.CONSTRUCTOR)
final def isMixinConstructor = isTerm && (name == nme.MIXIN_CONSTRUCTOR)
final def isConstructor = isTerm && (name == nme.CONSTRUCTOR) || (name == nme.MIXIN_CONSTRUCTOR)
- final def isModule = isTerm && hasFlag(MODULE)
final def isStaticModule = isModule && isStatic && !isMethod
- final def isPackage = isModule && hasFlag(PACKAGE)
final def isThisSym = isTerm && owner.thisSym == this
//final def isMonomorphicType = isType && hasFlag(MONOMORPHIC)
final def isError = hasFlag(IS_ERROR)
final def isErroneous = isError || isInitialized && tpe.isErroneous
- final def isTrait = isClass && hasFlag(TRAIT | notDEFERRED) // A virtual class becomes a trait (part of DEVIRTUALIZE)
+ override final def isTrait: Boolean = isClass && hasFlag(TRAIT | notDEFERRED) // A virtual class becomes a trait (part of DEVIRTUALIZE)
final def isTypeParameterOrSkolem = isType && hasFlag(PARAM)
final def isTypeSkolem = isSkolem && hasFlag(PARAM)
- final def isTypeParameter = isTypeParameterOrSkolem && !isSkolem
// a type symbol bound by an existential type, for instance the T in
// List[T] forSome { type T }
- final def isExistential = isType && hasFlag(EXISTENTIAL)
- final def isExistentialSkolem = isSkolem && hasFlag(EXISTENTIAL)
- final def isExistentialQuantified = isExistential && !isSkolem
+ final def isExistentialSkolem = isExistentiallyBound && isSkolem
+ final def isExistentialQuantified = isExistentiallyBound && !isSkolem
// class C extends D( { class E { ... } ... } ). Here, E is a class local to a constructor
final def isClassLocalToConstructor = isClass && hasFlag(INCONSTRUCTOR)
@@ -418,24 +431,17 @@ trait Symbols {
final def isAnonymousClass = isClass && (originalName startsWith nme.ANON_CLASS_NAME) // todo: find out why we can't use containsName here.
final def isAnonymousFunction = hasFlag(SYNTHETIC) && (name containsName nme.ANON_FUN_NAME)
- final def isRefinementClass = isClass && name == nme.REFINE_CLASS_NAME.toTypeName; // no lifting for refinement classes
- final def isModuleClass = isClass && hasFlag(MODULE)
final def isClassOfModule = isModuleClass || isClass && nme.isLocalName(name)
- final def isPackageClass = isClass && hasFlag(PACKAGE)
final def isPackageObject = isModule && name == nme.PACKAGEkw && owner.isPackageClass
final def isPackageObjectClass = isModuleClass && name.toTermName == nme.PACKAGEkw && owner.isPackageClass
final def definedInPackage = owner.isPackageClass || owner.isPackageObjectClass
- final def isRoot = isPackageClass && name == nme.ROOT.toTypeName
- final def isRootPackage = isPackage && name == nme.ROOTPKG
- final def isEmptyPackage = isPackage && name == nme.EMPTY_PACKAGE_NAME
- final def isEmptyPackageClass = isPackageClass && name == nme.EMPTY_PACKAGE_NAME.toTypeName
final def isPredefModule = isModule && name == nme.Predef && owner.isScalaPackageClass // not printed as a prefix
final def isScalaPackage = isPackage && name == nme.scala_ && owner.isRoot || // not printed as a prefix
isPackageObject && owner.isScalaPackageClass
final def isScalaPackageClass: Boolean = isPackageClass && owner.isRoot && name == nme.scala_.toTypeName ||
isPackageObjectClass && owner.isScalaPackageClass // not printed as a prefix
- /** Is symbol a monomophic type?
+ /** Is symbol a monomorphic type?
* assumption: if a type starts out as monomorphic, it will not acquire
* type parameters in later phases.
*/
@@ -448,26 +454,19 @@ trait Symbols {
}
}
- def isDeprecated = hasAnnotation(DeprecatedAttr)
- def deprecationMessage: Option[String] =
- annotations find (_.atp.typeSymbol == DeprecatedAttr) flatMap { annot =>
- annot.args match {
- case Literal(const) :: Nil =>
- Some(const.stringValue)
- case _ =>
- None
- }
- }
- def elisionLevel: Option[Int] = {
- if (!hasAnnotation(ElidableMethodClass)) None
- else annotations find (_.atp.typeSymbol == ElidableMethodClass) flatMap { annot =>
- // since we default to enabled by default, only look hard for falsity
- annot.args match {
- case Literal(Constant(x: Int)) :: Nil => Some(x)
- case _ => None
- }
- }
+ def isDeprecated = hasAnnotation(DeprecatedAttr)
+ def deprecationMessage = getAnnotationArg(DeprecatedAttr, 0) collect { case Literal(const) => const.stringValue }
+ // !!! when annotation arguments are not literal strings, but any sort of
+ // assembly of strings, there is a fair chance they will turn up here not as
+ // Literal(const) but some arbitrary AST. However nothing in the compiler
+ // prevents someone from writing a @migration annotation with a calculated
+ // string. So this needs attention. For now the fact that migration is
+ // private[scala] ought to provide enough protection.
+ def migrationMessage = getAnnotationArg(MigrationAnnotationClass, 2) collect {
+ case Literal(const) => const.stringValue
+ case x => x.toString // should not be necessary, but better than silently ignoring an issue
}
+ def elisionLevel = getAnnotationArg(ElidableMethodClass, 0) collect { case Literal(Constant(x: Int)) => x }
/** Does this symbol denote a wrapper object of the interpreter or its class? */
final def isInterpreterWrapper =
@@ -476,6 +475,8 @@ trait Symbols {
name.toString.startsWith(nme.INTERPRETER_LINE_PREFIX) &&
name.toString.endsWith(nme.INTERPRETER_WRAPPER_SUFFIX)
+ override def isEffectiveRoot = super.isEffectiveRoot || isInterpreterWrapper
+
/** Is this symbol an accessor method for outer? */
final def isOuterAccessor = {
hasFlag(STABLE | SYNTHETIC) &&
@@ -495,9 +496,6 @@ trait Symbols {
(!hasFlag(METHOD | BYNAMEPARAM) || hasFlag(STABLE)) &&
!(tpe.isVolatile && !hasAnnotation(uncheckedStableClass))
- def isDeferred =
- hasFlag(DEFERRED) && !isClass
-
def isVirtualClass =
hasFlag(DEFERRED) && isClass
@@ -505,16 +503,6 @@ trait Symbols {
hasFlag(DEFERRED) && isTrait
/** Is this symbol a public */
- final def isPublic: Boolean =
- !hasFlag(PRIVATE | PROTECTED) && privateWithin == NoSymbol
-
- /** Is this symbol a private local */
- final def isPrivateLocal =
- hasFlag(PRIVATE) && hasFlag(LOCAL)
-
- /** Is this symbol a protected local */
- final def isProtectedLocal =
- hasFlag(PROTECTED) && hasFlag(LOCAL)
/** Does this symbol denote the primary constructor of its enclosing class? */
final def isPrimaryConstructor =
@@ -528,11 +516,6 @@ trait Symbols {
final def isCaseApplyOrUnapply =
isMethod && hasFlag(CASE) && hasFlag(SYNTHETIC)
- /** Is this symbol an implementation class for a mixin? */
- final def isImplClass: Boolean = isClass && hasFlag(IMPLCLASS)
-
- /** Is thhis symbol early initialized */
- final def isEarly: Boolean = isTerm && hasFlag(PRESUPER)
/** Is this symbol a trait which needs an implementation class? */
final def needsImplClass: Boolean =
@@ -560,16 +543,9 @@ trait Symbols {
final def isStaticOwner: Boolean =
isPackageClass || isModuleClass && isStatic
- /** Is this symbol final? */
- final def isFinal: Boolean = (
- hasFlag(FINAL) ||
- isTerm && (
- hasFlag(PRIVATE) || isLocal || owner.isClass && owner.hasFlag(FINAL | MODULE))
- )
-
- /** Is this symbol a sealed class? */
- final def isSealed: Boolean =
- isClass && (hasFlag(SEALED) || isValueClass(this))
+ /** Is this symbol effectively final? I.e, it cannot be overridden */
+ final def isEffectivelyFinal: Boolean = isFinal || isTerm && (
+ hasFlag(PRIVATE) || isLocal || owner.isClass && owner.hasFlag(FINAL | MODULE))
/** Is this symbol locally defined? I.e. not accessed from outside `this' instance */
final def isLocal: Boolean = owner.isTerm
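isEffectivelyFinal captures members that cannot be overridden even without an explicit final modifier: private and local members, and members of final classes or objects. The user-level situation it describes looks like the plain Scala illustration below (not compiler code).

    // User-level illustration of "effectively final": none of these can be
    // overridden, even though only one is marked final.
    object EffectivelyFinalSketch {
      class Base {
        final def a = 1        // final: explicitly
        private def b = 2      // effectively final: private, invisible to subclasses
        def local = {          // the nested method is local to a term, so also effectively final
          def c = 3
          c
        }
      }
      object Singleton {       // members of an object cannot be overridden either
        def d = 4
      }

      def main(args: Array[String]): Unit =
        println(new Base().a + new Base().local + Singleton.d)   // 8
    }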
@@ -611,7 +587,7 @@ trait Symbols {
/** A member of class `base' is incomplete if
* (1) it is declared deferred or
* (2) it is abstract override and its super symbol in `base' is
- * nonexistent or inclomplete.
+ * nonexistent or incomplete.
*
* @param base ...
* @return ...
@@ -642,9 +618,6 @@ trait Symbols {
isClass && (hasFlag(STABLE) || checkStable())
}
- final def isCovariant: Boolean = isType && hasFlag(COVARIANT)
-
- final def isContravariant: Boolean = isType && hasFlag(CONTRAVARIANT)
/** The variance of this symbol as an integer */
final def variance: Int =
@@ -655,7 +628,7 @@ trait Symbols {
// Flags, owner, and name attributes --------------------------------------------------------------
def owner: Symbol = rawowner
- final def owner_=(owner: Symbol) { rawowner = owner }
+ override final def owner_=(owner: Symbol) { rawowner = owner }
def ownerChain: List[Symbol] = this :: owner.ownerChain
@@ -696,11 +669,10 @@ trait Symbols {
val fs = rawflags & phase.flagMask
(fs | ((fs & LateFlags) >>> LateShift)) & ~(fs >>> AntiShift)
}
- final def flags_=(fs: Long) = rawflags = fs
+ override final def flags_=(fs: Long) = rawflags = fs
final def setFlag(mask: Long): this.type = { rawflags = rawflags | mask; this }
final def resetFlag(mask: Long): this.type = { rawflags = rawflags & ~mask; this }
final def getFlag(mask: Long): Long = flags & mask
- final def hasFlag(mask: Long): Boolean = (flags & mask) != 0L
final def resetFlags { rawflags = rawflags & TopLevelCreationFlags }
/** The class or term up to which this symbol is accessible,
@@ -735,12 +707,12 @@ trait Symbols {
* to generate a type of kind *
* for a term symbol, its usual type
*/
- def tpe: Type = info
+ override def tpe: Type = info
/** Get type info associated with symbol at current phase, after
* ensuring that symbol is initialized (i.e. type is completed).
*/
- def info: Type = try {
+ override def info: Type = try {
var cnt = 0
while (validTo == NoPeriod) {
//if (settings.debug.value) System.out.println("completing " + this);//DEBUG
@@ -748,9 +720,14 @@ trait Symbols {
assert(infos.prev eq null, this.name)
val tp = infos.info
//if (settings.debug.value) System.out.println("completing " + this.rawname + tp.getClass());//debug
- lock {
- setInfo(ErrorType)
- throw CyclicReference(this, tp)
+ if ((rawflags & LOCKED) != 0L) { // rolled out once for performance
+ lock {
+ setInfo(ErrorType)
+ throw CyclicReference(this, tp)
+ }
+ } else {
+ rawflags |= LOCKED
+ activeLocks += 1
}
val current = phase
try {
@@ -764,7 +741,7 @@ trait Symbols {
cnt += 1
// allow for two completions:
// one: sourceCompleter to LazyType, two: LazyType to completed type
- if (cnt == 3) throw new Error("no progress in completing " + this + ":" + tp)
+ if (cnt == 3) abort("no progress in completing " + this + ":" + tp)
}
val result = rawInfo
result
@@ -774,15 +751,18 @@ trait Symbols {
throw ex
}
- /** Set initial info. */
- def setInfo(info: Type): this.type = {
+ override def info_=(info: Type) {
assert(info ne null)
infos = TypeHistory(currentPeriod, info, null)
unlock()
validTo = if (info.isComplete) currentPeriod else NoPeriod
- this
}
+ /** Set initial info. */
+ def setInfo(info: Type): this.type = { info_=(info); this }
+
+ def setInfoOwnerAdjusted(info: Type): this.type = setInfo(info.atOwner(this))
+
/** Set new info valid from start of this phase. */
final def updateInfo(info: Type): Symbol = {
assert(phaseId(infos.validFrom) <= phase.id)
@@ -903,7 +883,7 @@ trait Symbols {
* Not applicable for term symbols.
*/
def typeConstructor: Type =
- throw new Error("typeConstructor inapplicable for " + this)
+ abort("typeConstructor inapplicable for " + this)
/** @M -- tpe vs tpeHK:
* Symbol::tpe creates a TypeRef that has dummy type arguments to get a type of kind *
@@ -978,13 +958,13 @@ trait Symbols {
*/
def existentialBound: Type =
if (this.isClass)
- polyType(this.typeParams, mkTypeBounds(NothingClass.tpe, this.classBound))
+ polyType(this.typeParams, TypeBounds(NothingClass.tpe, this.classBound))
else if (this.isAbstractType)
this.info
else if (this.isTerm)
- mkTypeBounds(NothingClass.tpe, intersectionType(List(this.tpe, SingletonClass.tpe)))
+ TypeBounds(NothingClass.tpe, intersectionType(List(this.tpe, SingletonClass.tpe)))
else
- throw new Error("unexpected alias type: "+this)
+ abort("unexpected alias type: "+this)
/** Reset symbol to initial state
*/
@@ -1078,12 +1058,6 @@ trait Symbols {
// Access to related symbols --------------------------------------------------
- /** The next enclosing class */
- def enclClass: Symbol = if (isClass) this else owner.enclClass
-
- /** The next enclosing method */
- def enclMethod: Symbol = if (isSourceMethod) this else owner.enclMethod
-
/** The primary constructor of a class */
def primaryConstructor: Symbol = {
var c = info.decl(
@@ -1102,10 +1076,6 @@ trait Symbols {
/** The type of `this' in a class, or else the type of the symbol itself. */
def typeOfThis = thisSym.tpe
- /** Sets the type of `this' in a class */
- def typeOfThis_=(tp: Type): Unit =
- throw new Error("typeOfThis cannot be set for " + this)
-
/** If symbol is a class, the type <code>this.type</code> in this class,
* otherwise <code>NoPrefix</code>.
* We always have: thisType <:< typeOfThis
@@ -1173,13 +1143,6 @@ trait Symbols {
*/
def alias: Symbol = NoSymbol
- /** For parameter symbols: the method computing its default value, NoSymbol
- * for all others
- */
- def defaultGetter: Symbol = NoSymbol
- def defaultGetter_=(getter: Symbol): Unit =
- throw new Error("defaultGetter cannot be set for " + this)
-
/** For a lazy value, its lazy accessor. NoSymbol for all others */
def lazyAccessor: Symbol = NoSymbol
@@ -1193,7 +1156,7 @@ trait Symbols {
/** The directly or indirectly inherited mixins of this class
* except for mixin classes inherited by the superclass. Mixin classes appear
- * in linearlization order.
+ * in linearization order.
*/
def mixinClasses: List[Symbol] = {
val sc = superClass
@@ -1213,7 +1176,7 @@ trait Symbols {
&& !packSym.isPackageClass)
packSym = packSym.owner
if (packSym != NoSymbol)
- packSym = packSym.linkedModuleOfClass
+ packSym = packSym.companionModule
packSym
}
@@ -1245,10 +1208,30 @@ trait Symbols {
res
}
+ /** @PP: Added diagram because every time I come through here I end up
+ * losing my train of thought. [Renaming occurs.] This diagram is a
+ * bit less necessary since the renaming, but leaving in place
+ * due to high artistic merit.
+ *
+ * class Foo <
+ * ^ ^ (2) \
+ * | | | \
+ * | (5) | (3)
+ * | | | \
+ * (1) v v \
+ * object Foo (4)-> > class Foo$
+ *
+ * (1) companionClass
+ * (2) companionModule
+ * (3) linkedClassOfClass
+ * (4) moduleClass
+ * (5) companionSymbol
+ */
+
/** The class with the same name in the same package as this module or
- * case class factory. A better name would be companionClassOfModule.
+ * case class factory.
*/
- final def linkedClassOfModule: Symbol = {
+ final def companionClass: Symbol = {
if (this != NoSymbol)
flatOwnerInfo.decl(name.toTypeName).suchThat(_ isCoDefinedWith this)
else NoSymbol
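The renamed companionClass/companionModule/companionSymbol accessors (diagrammed above) can be observed from outside the compiler with runtime reflection, whose public API uses slightly different names; the sketch below assumes scala-reflect's Symbol.companion, taken to correspond to companionSymbol here.

    import scala.reflect.runtime.universe._

    // Observing the class <-> object pairing via the public scala-reflect API,
    // which is assumed to map onto the internal accessors shown above.
    object CompanionSketch {
      class Foo
      object Foo

      def main(args: Array[String]): Unit = {
        val classSym  = typeOf[Foo].typeSymbol    // class Foo
        val moduleSym = classSym.companion        // object Foo
        assert(moduleSym.isModule)
        assert(moduleSym.companion == classSym)   // and back again
      }
    }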
@@ -1257,44 +1240,41 @@ trait Symbols {
/** A helper method that factors the common code used the discover a companion module of a class. If a companion
* module exists, its symbol is returned, otherwise, `NoSymbol` is returned. The method assumes that `this`
* symbol has already been checked to be a class (using `isClass`). */
- private final def linkedModuleOfClass0: Symbol =
+ private final def companionModule0: Symbol =
flatOwnerInfo.decl(name.toTermName).suchThat(
sym => (sym hasFlag MODULE) && (sym isCoDefinedWith this))
/** The module or case class factory with the same name in the same
- * package as this class. A better name would be companionModuleOfClass.
+ * package as this class.
*/
- final def linkedModuleOfClass: Symbol =
+ final def companionModule: Symbol =
if (this.isClass && !this.isAnonymousClass && !this.isRefinementClass)
- linkedModuleOfClass0
+ companionModule0
else NoSymbol
/** For a module its linked class, for a class its linked module or case
* factory otherwise.
*/
- final def linkedSym: Symbol =
- if (isTerm) linkedClassOfModule
+ final def companionSymbol: Symbol =
+ if (isTerm) companionClass
else if (isClass)
- linkedModuleOfClass0
+ companionModule0
else NoSymbol
- /** For a module class its linked class, for a plain class
- * the module class of its linked module.
- * For instance:
- * object Foo
- * class Foo
+ /** For a module class: its linked class
+ * For a plain class: the module class of its linked module.
*
* Then object Foo has a `moduleClass' (invisible to the user, the backend calls it Foo$
- * linkedClassOFClass goes from class Foo$ to class Foo, and back.
+ * linkedClassOfClass goes from class Foo$ to class Foo, and back.
*/
final def linkedClassOfClass: Symbol =
- if (isModuleClass) linkedClassOfModule else linkedModuleOfClass.moduleClass
+ if (isModuleClass) companionClass else companionModule.moduleClass
/**
* Returns the rawInfo of the owner. If the current phase has flat classes, it first
* applies all pending type maps to this symbol.
*
- * Asssume this is the ModuleSymbol for B in the follwing definition:
+ * Assume this is the ModuleSymbol for B in the following definition:
* package p { class A { object B { val x = 1 } } }
*
* The owner after flatten is "package p" (see "def owner"). The flatten type map enters
@@ -1309,7 +1289,7 @@ trait Symbols {
/** If this symbol is an implementation class, its interface, otherwise the symbol itself
* The method follows two strategies to determine the interface.
- * - during or after erasure, it takes the last parent of the implementatation class
+ * - during or after erasure, it takes the last parent of the implementation class
* (which is always the interface, by convention)
* - before erasure, it looks up the interface name in the scope of the owner of the class.
* This only works for implementation classes owned by other classes or traits.
@@ -1327,11 +1307,6 @@ trait Symbols {
result
} else this
- /** The module corresponding to this module class (note that this
- * is not updated when a module is cloned).
- */
- def sourceModule: Symbol = NoSymbol
-
/** The module class corresponding to this module.
*/
def moduleClass: Symbol = NoSymbol
@@ -1407,7 +1382,7 @@ trait Symbols {
final def setter(base: Symbol, hasExpandedName: Boolean): Symbol = {
var sname = nme.getterToSetter(nme.getterName(name))
- if (hasExpandedName) sname = base.expandedSetterName(sname)
+ if (hasExpandedName) sname = nme.expandedSetterName(sname, base)
base.info.decl(sname) filter (_.hasFlag(ACCESSOR))
}
@@ -1417,7 +1392,7 @@ trait Symbols {
final def caseModule: Symbol = {
var modname = name.toTermName
if (privateWithin.isClass && !privateWithin.isModuleClass && !hasFlag(EXPANDEDNAME))
- modname = privateWithin.expandedName(modname)
+ modname = nme.expandedName(modname, privateWithin)
initialize.owner.info.decl(modname).suchThat(_.isModule)
}
@@ -1455,47 +1430,47 @@ trait Symbols {
getter(owner).expandName(base)
setter(owner).expandName(base)
}
- name = base.expandedName(name)
+ name = nme.expandedName(name, base)
if (isType) name = name.toTypeName
}
}
- def expandedSetterName(simpleSetterName: Name): Name =
- newTermName(fullNameString('$') + nme.TRAIT_SETTER_SEPARATOR_STRING + simpleSetterName)
-
- /** The expanded name of `name' relative to this class as base
- */
- def expandedName(name: Name): Name = {
- newTermName(fullNameString('$') + nme.EXPAND_SEPARATOR_STRING + name)
- }
-
def sourceFile: AbstractFile =
(if (isModule) moduleClass else toplevelClass).sourceFile
def sourceFile_=(f: AbstractFile) {
- throw new Error("sourceFile_= inapplicable for " + this)
+ abort("sourceFile_= inapplicable for " + this)
}
def isFromClassFile: Boolean =
(if (isModule) moduleClass else toplevelClass).isFromClassFile
/** If this is a sealed class, its known direct subclasses. Otherwise Set.empty */
- def children: Set[Symbol] = emptySymbolSet
+ def children: List[Symbol] = Nil
- /** Declare given subclass `sym' of this sealed class */
- def addChild(sym: Symbol) {
- throw new Error("addChild inapplicable for " + this)
- }
+ /** Recursively finds all sealed descendants and returns a sorted list. */
+ def sealedDescendants: List[Symbol] = {
+ val kids = children flatMap (_.sealedDescendants)
+ val all = if (this hasFlag ABSTRACT) kids else this :: kids
+ all.distinct sortBy (_.sealedSortName)
+ }
// ToString -------------------------------------------------------------------
/** A tag which (in the ideal case) uniquely identifies class symbols */
- final def tag: Int = fullNameString.hashCode()
+ final def tag: Int = fullName.hashCode()
/** The simple name of this Symbol */
final def simpleName: Name = name
+ /** The String used to order otherwise identical sealed symbols.
+ * This uses data which is stable across runs and variable classpaths
+ * (the initial Name) before falling back on id, which varies depending
+ * on exactly when a symbol is loaded.
+ */
+ final def sealedSortName: String = initName.toString + "#" + id
+
/** String representation of symbol's definition key word */
final def keyString: String =
if (isTrait && hasFlag(JAVA)) "interface"
@@ -1534,32 +1509,11 @@ trait Symbols {
* E.g. $eq => =.
* If settings.uniquId adds id.
*/
- def nameString: String = cleanNameString + idString
+ def nameString: String = decodedName + idString
- /** A nameString that never adds idString, for use in e.g. GenJVM
- * where appending #uniqid breaks the bytecode.
+ /** The name of the symbol before decoding, e.g. `$eq$eq` instead of `==`.
*/
- def cleanNameString: String = {
- val s = simpleName.decode
- if (s endsWith nme.LOCAL_SUFFIX) s.substring(0, s.length - nme.LOCAL_SUFFIX.length)
- else s
- }
-
- /** String representation of symbol's full name with <code>separator</code>
- * between class names.
- * Never translates expansions of operators back to operator symbol.
- * Never adds id.
- */
- final def fullNameString(separator: Char): String = {
- var str =
- if (isRoot || isRootPackage || this == NoSymbol) this.toString
- else if (owner.isRoot || owner.isEmptyPackageClass || owner.isInterpreterWrapper) simpleName.toString
- else owner.enclClass.fullNameString(separator) + separator + simpleName
- if (str.charAt(str.length - 1) == ' ') str = str.substring(0, str.length - 1)
- str
- }
-
- final def fullNameString: String = fullNameString('.')
+ def encodedName: String = name.toString
/** If settings.uniqid is set, the symbol's id, else "" */
final def idString: String =
@@ -1670,13 +1624,13 @@ trait Symbols {
privateWithin = NoSymbol
protected var referenced: Symbol = NoSymbol
- protected var defGetter: Symbol = NoSymbol
- def cloneSymbolImpl(owner: Symbol): Symbol = {
- val clone = new TermSymbol(owner, pos, name)
- clone.referenced = referenced
- clone.defGetter = defGetter
- clone
+ def cloneSymbolImpl(owner: Symbol): Symbol =
+ new TermSymbol(owner, pos, name).copyAttrsFrom(this)
+
+ def copyAttrsFrom(original: TermSymbol): this.type = {
+ referenced = original.referenced
+ this
}
private val validAliasFlags = SUPERACCESSOR | PARAMACCESSOR | MIXEDIN | SPECIALIZED
@@ -1694,10 +1648,6 @@ trait Symbols {
this
}
- override def defaultGetter = defGetter
- override def defaultGetter_=(getter: Symbol): Unit =
- defGetter = getter
-
override def outerSource: Symbol =
if (name endsWith nme.OUTER) initialize.referenced
else NoSymbol
@@ -1756,7 +1706,6 @@ trait Symbols {
/** A class for module symbols */
class ModuleSymbol(initOwner: Symbol, initPos: Position, initName: Name)
extends TermSymbol(initOwner, initPos, initName) {
-
private var flatname = nme.EMPTY
override def owner: Symbol =
@@ -1774,10 +1723,33 @@ trait Symbols {
flatname
} else rawname
- override def cloneSymbolImpl(owner: Symbol): Symbol = {
- val clone = new ModuleSymbol(owner, pos, name)
- clone.referenced = referenced
- clone
+ override def cloneSymbolImpl(owner: Symbol): Symbol =
+ new ModuleSymbol(owner, pos, name).copyAttrsFrom(this)
+ }
+
+ /** A class for method symbols */
+ class MethodSymbol(initOwner: Symbol, initPos: Position, initName: Name)
+ extends TermSymbol(initOwner, initPos, initName) {
+
+ private var mtpePeriod = NoPeriod
+ private var mtpePre: Type = _
+ private var mtpeResult: Type = _
+
+ override def cloneSymbolImpl(owner: Symbol): Symbol =
+ new MethodSymbol(owner, pos, name).copyAttrsFrom(this)
+
+ def typeAsMemberOf(pre: Type): Type = {
+ if (mtpePeriod == currentPeriod) {
+ if (mtpePre eq pre) return mtpeResult
+ } else if (isValid(mtpePeriod)) {
+ mtpePeriod = currentPeriod
+ if (mtpePre eq pre) return mtpeResult
+ }
+ val res = pre.computeMemberType(this)
+ mtpePeriod = currentPeriod
+ mtpePre = pre
+ mtpeResult = res
+ res
}
}
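typeAsMemberOf memoizes pre.computeMemberType(this) keyed on the current period and the prefix, recomputing when either changes. A generic rendering of that invalidate-by-token caching pattern is sketched below; Period, currentPeriod and the compute function are placeholders, and the isValid(mtpePeriod) fast path is omitted for brevity.

    // Generic sketch of the period-keyed memoization used by typeAsMemberOf.
    object PeriodCacheSketch {
      type Period = Int
      final val NoPeriod: Period = 0
      var currentPeriod: Period = 1

      class CachedMemberType[K <: AnyRef, V](compute: K => V) {
        private var cachedPeriod: Period = NoPeriod
        private var cachedKey: K = _
        private var cachedValue: V = _

        def apply(key: K): V =
          // reuse the cached result only if both the period and the key still match
          if (cachedPeriod == currentPeriod && (cachedKey eq key)) cachedValue
          else {
            val res = compute(key)
            cachedPeriod = currentPeriod
            cachedKey = key
            cachedValue = res
            res
          }
      }

      def main(args: Array[String]): Unit = {
        var computations = 0
        val cache = new CachedMemberType[String, Int](s => { computations += 1; s.length })
        val pre = "some.prefix.Type"
        cache(pre); cache(pre)
        assert(computations == 1)   // second lookup served from the cache
        currentPeriod += 1          // a new period invalidates the cache
        cache(pre)
        assert(computations == 2)
      }
    }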
@@ -1793,7 +1765,7 @@ trait Symbols {
private var tpePeriod = NoPeriod
override def isType = true
- override def isTypeMember = true
+ override def isNonClassType = true
override def isAbstractType = isDeferred
override def isAliasType = !isDeferred
@@ -1827,7 +1799,7 @@ trait Symbols {
tpeCache
}
- // needed for experimentlal code for early types as type parameters
+ // needed for experimental code for early types as type parameters
// def refreshType() { tpePeriod = NoPeriod }
override def typeConstructor: Type = {
@@ -1840,7 +1812,7 @@ trait Symbols {
tyconCache
}
- override def setInfo(tp: Type): this.type = {
+ override def info_=(tp: Type) {
tpePeriod = NoPeriod
tyconCache = null
if (tp.isComplete)
@@ -1849,8 +1821,7 @@ trait Symbols {
case NoType | AnnotatedType(_, _, _) => ;
case _ => setFlag(MONOMORPHIC)
}
- super.setInfo(tp)
- this
+ super.info_=(tp)
}
override def reset(completer: Type) {
@@ -1943,7 +1914,7 @@ trait Symbols {
private var thissym: Symbol = this
override def isClass: Boolean = true
- override def isTypeMember = false
+ override def isNonClassType = false
override def isAbstractType = false
override def isAliasType = false
@@ -1977,7 +1948,7 @@ trait Symbols {
val period = thisTypePeriod
if (period != currentPeriod) {
thisTypePeriod = currentPeriod
- if (!isValid(period)) thisTypeCache = mkThisType(this)
+ if (!isValid(period)) thisTypeCache = ThisType(this)
}
thisTypeCache
}
@@ -2007,10 +1978,10 @@ trait Symbols {
}
override def sourceModule =
- if (isModuleClass) linkedModuleOfClass else NoSymbol
+ if (isModuleClass) companionModule else NoSymbol
- private var childSet: Set[Symbol] = emptySymbolSet
- override def children: Set[Symbol] = childSet
+ private var childSet: Set[Symbol] = Set()
+ override def children: List[Symbol] = childSet.toList sortBy (_.sealedSortName)
override def addChild(sym: Symbol) { childSet = childSet + sym }
incCounter(classSymbolCount)
@@ -2026,29 +1997,28 @@ trait Symbols {
def this(module: TermSymbol) = {
this(module.owner, module.pos, module.name.toTypeName)
setFlag(module.getFlag(ModuleToClassFlags) | MODULE | FINAL)
- setSourceModule(module)
+ sourceModule = module
}
override def sourceModule = module
lazy val implicitMembers = info.implicitMembers
- def setSourceModule(module: Symbol) { this.module = module }
+ override def sourceModule_=(module: Symbol) { this.module = module }
}
- /** An object repreesenting a missing symbol */
+ /** An object representing a missing symbol */
object NoSymbol extends Symbol(null, NoPosition, nme.NOSYMBOL) {
setInfo(NoType)
privateWithin = this
- override def setInfo(info: Type): this.type = {
+ override def info_=(info: Type) {
infos = TypeHistory(1, NoType, null)
unlock()
validTo = currentPeriod
- this
}
override def defString: String = toString
override def locationString: String = ""
override def enclClass: Symbol = this
override def toplevelClass: Symbol = this
override def enclMethod: Symbol = this
- override def owner: Symbol = throw new Error("no-symbol does not have owner")
+ override def owner: Symbol = abort("no-symbol does not have owner")
override def sourceFile: AbstractFile = null
override def ownerChain: List[Symbol] = List()
override def ownersIterator: Iterator[Symbol] = Iterator.empty
@@ -2058,7 +2028,7 @@ trait Symbols {
override def rawInfo: Type = NoType
protected def doCookJavaRawInfo() {}
override def accessBoundary(base: Symbol): Symbol = RootClass
- def cloneSymbolImpl(owner: Symbol): Symbol = throw new Error()
+ def cloneSymbolImpl(owner: Symbol): Symbol = abort()
}
diff --git a/src/compiler/scala/tools/nsc/symtab/Types.scala b/src/compiler/scala/tools/nsc/symtab/Types.scala
index 37ae9f35fe..4211392b88 100644
--- a/src/compiler/scala/tools/nsc/symtab/Types.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Types.scala
@@ -8,12 +8,12 @@ package scala.tools.nsc
package symtab
import scala.collection.immutable
-import scala.collection.mutable.{ListBuffer, HashMap, WeakHashMap}
+import scala.collection.mutable.{ListBuffer, WeakHashMap}
import ast.TreeGen
import util.{HashSet, Position, NoPosition}
import util.Statistics._
import Flags._
-
+import scala.util.control.ControlThrowable
/* A standard type pattern match:
case ErrorType =>
@@ -60,8 +60,7 @@ import Flags._
case DeBruijnIndex(level, index)
*/
-trait Types {
- self: SymbolTable =>
+trait Types extends reflect.generic.Types { self: SymbolTable =>
import definitions._
@@ -74,6 +73,7 @@ trait Types {
private final val LogPendingSubTypesThreshold = 50
private final val LogPendingBaseTypesThreshold = 50
+ private final val LogVolatileThreshold = 50
/** A don't care value for the depth parameter in lubs/glbs and related operations */
private final val AnyDepth = -3
@@ -202,7 +202,7 @@ trait Types {
/** A proxy for a type (identified by field `underlying') that forwards most
* operations to it. Every operation that is overridden for some kind of types is
- * forwarded here. Some opererations are rewrapped again.
+ * forwarded here. Some operations are rewrapped again.
*/
trait RewrappingTypeProxy extends SimpleTypeProxy {
protected def maybeRewrap(newtp: Type) = if (newtp eq underlying) this else rewrap(newtp)
@@ -227,6 +227,7 @@ trait Types {
override def normalize = maybeRewrap(underlying.normalize)
override def dealias = maybeRewrap(underlying.dealias)
override def cloneInfo(owner: Symbol) = maybeRewrap(underlying.cloneInfo(owner))
+ override def atOwner(owner: Symbol) = maybeRewrap(underlying.atOwner(owner))
override def prefixString = underlying.prefixString
override def isComplete = underlying.isComplete
override def complete(sym: Symbol) = underlying.complete(sym)
@@ -236,7 +237,7 @@ trait Types {
}
/** The base class for all types */
- abstract class Type {
+ abstract class Type extends AbsType {
/** Types for which asSeenFrom always is the identity, no matter what
* prefix or owner.
@@ -302,7 +303,7 @@ trait Types {
def typeOfThis: Type = typeSymbol.typeOfThis
/** Map to a singleton type which is a subtype of this type.
- * todo: change to singleton type of an existentgially defined variable
+ * todo: change to singleton type of an existentially defined variable
* of the right type instead of making this a `this` of a refined type.
*/
def narrow: Type =
@@ -319,7 +320,7 @@ trait Types {
* for a reference denoting an abstract type, its bounds,
* for all other types, a TypeBounds type all of whose bounds are this type.
*/
- def bounds: TypeBounds = mkTypeBounds(this, this)
+ def bounds: TypeBounds = TypeBounds(this, this)
/** For a class or intersection type, its parents.
* For a TypeBounds type, the parents of its hi bound.
@@ -349,6 +350,11 @@ trait Types {
def resultType(actuals: List[Type]) = this
+ /** If this is a TypeRef `clazz`[`T`], return the argument `T`;
+ * otherwise return this type.
+ */
+ def remove(clazz: Symbol): Type = this
+
def resultApprox: Type = ApproximateDeBruijnMap(resultType)
/** For a curried method or poly type its non-method result type,
@@ -508,14 +514,18 @@ trait Types {
}
/** The type of `sym', seen as a member of this type. */
- def memberType(sym: Symbol): Type = {
- //@M don't prematurely instantiate higher-kinded types, they will be instantiated by transform, typedTypeApply, etc. when really necessary
- sym.tpeHK match {
- case ov @ OverloadedType(pre, alts) =>
- OverloadedType(this, alts)
- case tp =>
- tp.asSeenFrom(this, sym.owner)
- }
+ def memberType(sym: Symbol): Type = sym match {
+ case meth: MethodSymbol =>
+ meth.typeAsMemberOf(this)
+ case _ =>
+ computeMemberType(sym)
+ }
+
+ def computeMemberType(sym: Symbol): Type = sym.tpeHK match { //@M don't prematurely instantiate higher-kinded types, they will be instantiated by transform, typedTypeApply, etc. when really necessary
+ case OverloadedType(_, alts) =>
+ OverloadedType(this, alts)
+ case tp =>
+ tp.asSeenFrom(this, sym.owner)
}
/** Substitute types `to' for occurrences of references to
@@ -580,6 +590,34 @@ trait Types {
}
}
+ /** Can this type only be subtyped by bottom types?
+ * This is assessed to be the case if the class is final,
+ * and all type parameters (if any) are invariant.
+ */
+ def isFinalType = (
+ typeSymbol.isFinal &&
+ (typeSymbol.typeParams forall (_.variance == 0))
+ )
+
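Spelled out against familiar library types, the definition just given classifies as follows (illustrative instances, not an exhaustive list):

    // String     -- final class, no type parameters              => a final type
    // Some[Int]  -- Some is final, but A is covariant, so the
    //               non-bottom Some[Nothing] still subtypes it    => not a final type
    // List[Int]  -- List itself is not final                      => not a final type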
+ /** Is this type a subtype of that type in a pattern context?
+ * Any type arguments on the right hand side are replaced with
+ * fresh existentials, except for Arrays.
+ *
+ * See bug1434.scala for an example of code which would fail
+ * if only a <:< test were applied.
+ */
+ def matchesPattern(that: Type): Boolean = {
+ (this <:< that) || ((this, that) match {
+ case (TypeRef(_, ArrayClass, List(arg1)), TypeRef(_, ArrayClass, List(arg2))) if arg2.typeSymbol.typeParams.nonEmpty =>
+ arg1 matchesPattern arg2
+ case (_, TypeRef(_, _, args)) =>
+ val newtp = existentialAbstraction(args map (_.typeSymbol), that)
+ !(that =:= newtp) && (this <:< newtp)
+ case _ =>
+ false
+ })
+ }
+
def stat_<:<(that: Type): Boolean = {
incCounter(subtypeCount)
val start = startTimer(subtypeNanos)
@@ -673,7 +711,7 @@ trait Types {
if (sym == btssym) return mid
else if (sym isLess btssym) hi = mid - 1
else if (btssym isLess sym) lo = mid + 1
- else throw new Error()
+ else abort()
}
-1
}
@@ -683,6 +721,10 @@ trait Types {
*/
def cloneInfo(owner: Symbol) = this
+ /** Make sure this type is correct as the info of the given owner; clone it if not.
+ */
+ def atOwner(owner: Symbol) = this
+
protected def objectPrefix = "object "
protected def packagePrefix = "package "
@@ -715,17 +757,10 @@ trait Types {
typeVarToOriginMap(this) eq this
}
- /** Is this type completed (i.e. not a lazy type)?
- */
- def isComplete: Boolean = true
-
/** Is this type a varargs parameter?
*/
def isVarargs: Boolean = typeSymbol == RepeatedParamClass
- /** If this is a lazy type, assign a new type to `sym'. */
- def complete(sym: Symbol) {}
-
/** If this is a symbol loader type, load and assign a new type to
* `sym'.
*/
@@ -846,7 +881,7 @@ trait Types {
}
}
- /** The existential skolems and existentially quantifed variables which are free in this type */
+ /** The existential skolems and existentially quantified variables which are free in this type */
def existentialSkolems: List[Symbol] = {
var boundSyms: List[Symbol] = List()
var skolems: List[Symbol] = List()
@@ -895,24 +930,8 @@ trait Types {
/** The kind of this type; used for debugging */
def kind: String = "unknown type of class "+getClass()
-
- override def toString: String =
- if (tostringRecursions >= maxTostringRecursions)
- "..."
- else
- try {
- tostringRecursions += 1
- safeToString
- } finally {
- tostringRecursions -= 1
- }
-
- def safeToString: String = super.toString
}
- private final val maxTostringRecursions = 50
- private var tostringRecursions = 0
-
// Subclasses ------------------------------------------------------------
trait UniqueType {
@@ -956,10 +975,11 @@ trait Types {
incCounter(singletonBaseTypeSeqCount)
underlying.baseTypeSeq prepend this
}
+ override def isHigherKinded = false // singleton type classifies objects, thus must be kind *
override def safeToString: String = prefixString + "type"
/*
override def typeOfThis: Type = typeSymbol.typeOfThis
- override def bounds: TypeBounds = mkTypeBounds(this, this)
+ override def bounds: TypeBounds = TypeBounds(this, this)
override def prefix: Type = NoType
override def typeArgs: List[Type] = List()
override def typeParams: List[Symbol] = List()
@@ -1018,7 +1038,7 @@ trait Types {
/** A class for this-types of the form <sym>.this.type
*/
- case class ThisType(sym: Symbol) extends SingletonType {
+ abstract case class ThisType(sym: Symbol) extends SingletonType {
//assert(sym.isClass && !sym.isModuleClass || sym.isRoot, sym)
override def isTrivial: Boolean = sym.isPackageClass
override def isNotNull = true
@@ -1029,7 +1049,7 @@ trait Types {
if (settings.debug.value) sym.nameString + ".this."
else if (sym.isRoot || sym.isEmptyPackageClass || sym.isInterpreterWrapper || sym.isScalaPackageClass) ""
else if (sym.isAnonymousClass || sym.isRefinementClass) "this."
- else if (sym.isModuleClass) sym.fullNameString + "."
+ else if (sym.isModuleClass) sym.fullName + "."
else sym.nameString + ".this."
override def safeToString: String =
if (sym.isRoot) "<root>"
@@ -1039,6 +1059,13 @@ trait Types {
override def kind = "ThisType"
}
+ object ThisType extends ThisTypeExtractor {
+ def apply(sym: Symbol): Type =
+ if (!phase.erasedTypes) unique(new ThisType(sym) with UniqueType)
+ else if (sym.isImplClass) sym.typeOfThis
+ else sym.tpe
+ }
+
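This and the other companion `apply`s introduced below all funnel through `unique`, which interns types so that structurally equal instances are shared. A stripped-down model of such interning (illustrative; the real `unique` in Types.scala keeps its own table and works with the UniqueType mixin):

    object Interner {
      private val table = collection.mutable.HashMap[AnyRef, AnyRef]()

      /** Return the canonical instance equal to `t`, registering `t` on first sight. */
      def unique[T <: AnyRef](t: T): T =
        table.getOrElseUpdate(t, t).asInstanceOf[T]
    }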
case class DeBruijnIndex(level: Int, paramId: Int) extends Type {
override def isTrivial = true
override def isStable = true
@@ -1063,9 +1090,9 @@ trait Types {
underlyingPeriod = currentPeriod
if (!isValid(period)) {
underlyingCache = pre.memberType(sym).resultType;
+ assert(underlyingCache ne this, this)
}
}
- assert(underlyingCache ne this, this)
underlyingCache
}
@@ -1093,7 +1120,9 @@ trait Types {
override def kind = "SingleType"
}
- case class SuperType(thistpe: Type, supertpe: Type) extends SingletonType {
+ object SingleType extends SingleTypeExtractor
+
+ abstract case class SuperType(thistpe: Type, supertpe: Type) extends SingletonType {
override val isTrivial: Boolean = thistpe.isTrivial && supertpe.isTrivial
override def isNotNull = true;
override def typeSymbol = thistpe.typeSymbol
@@ -1107,9 +1136,15 @@ trait Types {
override def kind = "SuperType"
}
+ object SuperType extends SuperTypeExtractor {
+ def apply(thistp: Type, supertp: Type): Type =
+ if (phase.erasedTypes) supertp
+ else unique(new SuperType(thistp, supertp) with UniqueType)
+ }
+
/** A class for the bounds of abstract types and type parameters
*/
- case class TypeBounds(lo: Type, hi: Type) extends SubType {
+ abstract case class TypeBounds(lo: Type, hi: Type) extends SubType {
def supertype = hi
override val isTrivial: Boolean = lo.isTrivial && hi.isTrivial
override def bounds: TypeBounds = this
@@ -1122,6 +1157,11 @@ trait Types {
override def kind = "TypeBoundsType"
}
+ object TypeBounds extends TypeBoundsExtractor {
+ def apply(lo: Type, hi: Type): TypeBounds =
+ unique(new TypeBounds(lo, hi) with UniqueType)
+ }
+
/** A common base class for intersection types and class types
*/
abstract class CompoundType extends Type {
@@ -1274,13 +1314,13 @@ trait Types {
if (isHigherKinded)
PolyType(
typeParams,
- refinementOfClass(
- typeSymbol,
+ RefinedType(
parents map {
case TypeRef(pre, sym, List()) => TypeRef(pre, sym, dummyArgs)
case p => p
},
- decls))
+ decls,
+ typeSymbol))
else super.normalize
}
@@ -1314,6 +1354,11 @@ trait Types {
override def kind = "RefinedType"
}
+ object RefinedType extends RefinedTypeExtractor {
+ def apply(parents: List[Type], decls: Scope, clazz: Symbol) =
+ new RefinedType(parents, decls) { override def typeSymbol = clazz }
+ }
+
/** A class representing a class info
*/
case class ClassInfoType(
@@ -1454,6 +1499,8 @@ trait Types {
override def kind = "ClassInfoType"
}
+ object ClassInfoType extends ClassInfoTypeExtractor
+
class PackageClassInfoType(decls: Scope, clazz: Symbol)
extends ClassInfoType(List(), decls, clazz)
@@ -1461,7 +1508,7 @@ trait Types {
*
* @param value ...
*/
- case class ConstantType(value: Constant) extends SingletonType {
+ abstract case class ConstantType(value: Constant) extends SingletonType {
override def underlying: Type = value.tpe
assert(underlying.typeSymbol != UnitClass)
override def isTrivial: Boolean = true
@@ -1474,6 +1521,21 @@ trait Types {
override def kind = "ConstantType"
}
+ object ConstantType extends ConstantTypeExtractor {
+ def apply(value: Constant): ConstantType = {
+ class UniqueConstantType extends ConstantType(value) with UniqueType {
+ /** Save the type of 'value'. For Java enums, it depends on finding the linked class,
+ * which might not be found after 'flatten'. */
+ private lazy val _tpe: Type = value.tpe
+ override def underlying: Type = _tpe
+ }
+ unique(new UniqueConstantType)
+ }
+ }
+
+ private var volatileRecursions: Int = 0
+ private val pendingVolatiles = new collection.mutable.HashSet[Symbol]
+
/** A class for named types of the form
* `&lt;prefix&gt;.&lt;sym.name&gt;[args]'
* Cannot be created directly; one should always use `typeRef'
@@ -1484,7 +1546,7 @@ trait Types {
* @param sym ...
* @param args ...
*/
- case class TypeRef(pre: Type, sym: Symbol, args: List[Type]) extends Type {
+ abstract case class TypeRef(pre: Type, sym: Symbol, args: List[Type]) extends Type {
// assert(!sym.isAbstractType || pre.isStable || pre.isError)
// assert(!pre.isInstanceOf[ClassInfoType], this)
// assert(!(sym hasFlag (PARAM | EXISTENTIAL)) || pre == NoPrefix, this)
@@ -1502,9 +1564,32 @@ trait Types {
sym.isAbstractType && (bounds.hi.typeSymbol isSubClass SingletonClass)
}
- override def isVolatile: Boolean =
+ override def isVolatile: Boolean = {
sym.isAliasType && normalize.isVolatile ||
- sym.isAbstractType && bounds.hi.isVolatile
+ sym.isAbstractType && {
+ // need to be careful not to fall into an infinite recursion here
+ // because volatile checking is done before all cycles are detected.
+ // the case to avoid is an abstract type directly or
+ // indirectly upper-bounded by itself. See #2918
+ try {
+ volatileRecursions += 1
+ if (volatileRecursions < LogVolatileThreshold)
+ bounds.hi.isVolatile
+ else if (pendingVolatiles contains sym)
+ true // we can return true here, because a cycle will be detected
+ // here afterwards and an error will result anyway.
+ else
+ try {
+ pendingVolatiles += sym
+ bounds.hi.isVolatile
+ } finally {
+ pendingVolatiles -= sym
+ }
+ } finally {
+ volatileRecursions -= 1
+ }
+ }
+ }
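The guard above is a general cycle-breaking pattern: a cheap depth counter while recursion is shallow, and an explicit pending set only past a threshold. A self-contained sketch of that pattern (the threshold and the answer-on-cycle are illustrative choices, mirroring LogVolatileThreshold and the return-true-on-cycle decision above):

    object CycleGuard {
      private val Threshold = 50
      private var depth     = 0
      private val pending   = collection.mutable.HashSet[AnyRef]()

      /** Evaluate `body`; if `key` comes around again past the threshold, answer `onCycle` instead. */
      def guarded(key: AnyRef, onCycle: => Boolean)(body: => Boolean): Boolean =
        try {
          depth += 1
          if (depth < Threshold) body
          else if (pending contains key) onCycle
          else
            try { pending += key; body }
            finally pending -= key
        } finally depth -= 1
    }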
override val isTrivial: Boolean =
!sym.isTypeParameter && pre.isTrivial && args.forall(_.isTrivial)
@@ -1525,10 +1610,10 @@ trait Types {
def thisInfo =
if (sym.isAliasType) normalize
- else if (sym.isTypeMember) transformInfo(sym.info)
+ else if (sym.isNonClassType) transformInfo(sym.info)
else sym.info
- def relativeInfo = if (sym.isTypeMember) transformInfo(pre.memberInfo(sym)) else pre.memberInfo(sym)
+ def relativeInfo = if (sym.isNonClassType) transformInfo(pre.memberInfo(sym)) else pre.memberInfo(sym)
override def typeSymbol = if (sym.isAliasType) normalize.typeSymbol else sym
override def termSymbol = if (sym.isAliasType) normalize.termSymbol else super.termSymbol
@@ -1590,7 +1675,7 @@ A type's typeSymbol should never be inspected directly.
// (!result.isEmpty) IFF isHigherKinded
override def typeParams: List[Symbol] = if (isHigherKinded) typeParamsDirect else List()
- override def typeConstructor = rawTypeRef(pre, sym, List())
+ override def typeConstructor = TypeRef(pre, sym, List())
// a reference (in a Scala program) to a type that has type parameters, but where the reference does not include type arguments
// note that it doesn't matter whether the symbol refers to a java or scala symbol,
@@ -1620,6 +1705,9 @@ A type's typeSymbol should never be inspected directly.
xform.dealias
} else this
+ override def remove(clazz: Symbol): Type =
+ if (sym == clazz && !args.isEmpty) args.head else this
+
def normalize0: Type =
if (sym.isAliasType) { // beta-reduce
if (sym.info.typeParams.length == args.length || !isHigherKinded) {
@@ -1628,14 +1716,14 @@ A type's typeSymbol should never be inspected directly.
val xform = transform(sym.info.resultType)
assert(xform ne this, this)
xform.normalize // cycles have been checked in typeRef
- } else {
- PolyType(typeParams, transform(sym.info.resultType).normalize) // eta-expand
+ } else { // should rarely happen, if at all
+ PolyType(sym.info.typeParams, transform(sym.info.resultType).normalize) // eta-expand -- for regularity, go through sym.info for typeParams
// @M TODO: should not use PolyType, as that's the type of a polymorphic value -- we really want a type *function*
}
} else if (isHigherKinded) {
// @M TODO: should not use PolyType, as that's the type of a polymorphic value -- we really want a type *function*
- // @M: initialize needed (see test/files/pos/ticket0137.scala)
- PolyType(typeParams, typeRef(pre, sym.initialize, dummyArgs))
+ // @M: initialize (by sym.info call) needed (see test/files/pos/ticket0137.scala)
+ PolyType(sym.info.typeParams, typeRef(pre, sym, dummyArgs)) // must go through sym.info for typeParams
} else if (sym.isRefinementClass) {
sym.info.normalize // @MO to AM: OK?
//@M I think this is okay, but changeset 12414 (which fixed #1241) re-introduced another bug (#2208)
@@ -1732,7 +1820,7 @@ A type's typeSymbol should never be inspected directly.
}
val monopart =
if (!settings.debug.value &&
- (shorthands contains sym.fullNameString) &&
+ (shorthands contains sym.fullName) &&
(sym.ownerChain forall (_.isClass))) // ensure that symbol is not a local copy with a name coincidence
sym.name.toString
else
@@ -1766,7 +1854,7 @@ A type's typeSymbol should never be inspected directly.
sym.isAnonymousClass || sym.isRefinementClass || sym.isScalaPackageClass)
""
else if (sym.isPackageClass)
- sym.fullNameString + "."
+ sym.fullName + "."
else if (isStable && (sym.name.toString endsWith ".type"))
sym.name.toString.substring(0, sym.name.length - 4)
else
@@ -1775,6 +1863,13 @@ A type's typeSymbol should never be inspected directly.
override def kind = "TypeRef"
}
+ object TypeRef extends TypeRefExtractor {
+ def apply(pre: Type, sym: Symbol, args: List[Type]): Type = {
+ class rawTypeRef extends TypeRef(pre, sym, args) with UniqueType
+ unique(new rawTypeRef)
+ }
+ }
+
/** A class representing a method type with parameters.
*/
case class MethodType(override val params: List[Symbol],
@@ -1782,6 +1877,9 @@ A type's typeSymbol should never be inspected directly.
override val isTrivial: Boolean =
params.forall(_.tpe.isTrivial) && resultType.isTrivial
+ def isImplicit = params.nonEmpty && params.head.isImplicit
+ def isJava = false // can we do something like we did for implicits? i.e. do Java methods without parameters need to be recognized?
+
//assert(paramTypes forall (pt => !pt.typeSymbol.isImplClass))//DEBUG
override def paramSectionCount: Int = resultType.paramSectionCount + 1
@@ -1821,14 +1919,20 @@ A type's typeSymbol should never be inspected directly.
copyMethodType(this, vparams, resultType.substSym(params, vparams).cloneInfo(owner))
}
+ override def atOwner(owner: Symbol) =
+ if ((params exists (_.owner != owner)) || (resultType.atOwner(owner) ne resultType))
+ cloneInfo(owner)
+ else
+ this
+
override def kind = "MethodType"
}
- // todo: this class is no longer needed, a method type is implicit if the first
- // parameter has the IMPLICIT flag
- class ImplicitMethodType(ps: List[Symbol], rt: Type) extends MethodType(ps, rt)
+ object MethodType extends MethodTypeExtractor
- class JavaMethodType(ps: List[Symbol], rt: Type) extends MethodType(ps, rt)
+ class JavaMethodType(ps: List[Symbol], rt: Type) extends MethodType(ps, rt) {
+ override def isJava = true
+ }
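With ImplicitMethodType gone, implicitness is read straight off the parameter list (isImplicit above), exactly as the removed todo suggested, while JavaMethodType keeps the Java distinction. In source-level terms (illustrative):

    // def f(implicit x: Int)  -- params.head carries the IMPLICIT flag => isImplicit == true
    // def g(x: Int)           -- no flag on params.head                => isImplicit == false
    // def h()                 -- params is empty                       => isImplicit == false
    //                            (hence the nonEmpty guard in isImplicit)
    // isJava stays false for ordinary MethodTypes and is overridden to true
    // only by JavaMethodType above.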
/** A class representing a polymorphic type or, if tparams.length == 0,
* a parameterless method type.
@@ -1841,6 +1945,7 @@ A type's typeSymbol should never be inspected directly.
*/
case class PolyType(override val typeParams: List[Symbol], override val resultType: Type)
extends Type {
+ // assert(!(typeParams contains NoSymbol), this)
override def paramSectionCount: Int = resultType.paramSectionCount
override def paramss: List[List[Symbol]] = resultType.paramss
@@ -1879,9 +1984,17 @@ A type's typeSymbol should never be inspected directly.
PolyType(tparams, resultType.substSym(typeParams, tparams).cloneInfo(owner))
}
+ override def atOwner(owner: Symbol) =
+ if ((typeParams exists (_.owner != owner)) || (resultType.atOwner(owner) ne resultType))
+ cloneInfo(owner)
+ else
+ this
+
override def kind = "PolyType"
}
+ object PolyType extends PolyTypeExtractor
+
case class ExistentialType(quantified: List[Symbol],
override val underlying: Type) extends RewrappingTypeProxy
{
@@ -1961,6 +2074,9 @@ A type's typeSymbol should never be inspected directly.
ExistentialType(tparams, underlying.substSym(quantified, tparams))
}
+ override def atOwner(owner: Symbol) =
+ if (quantified exists (_.owner != owner)) cloneInfo(owner) else this
+
override def kind = "ExistentialType"
def withTypeVars(op: Type => Boolean): Boolean = withTypeVars(op, AnyDepth)
@@ -1976,6 +2092,8 @@ A type's typeSymbol should never be inspected directly.
}
}
+ object ExistentialType extends ExistentialTypeExtractor
+
/** A class containing the alternatives and type prefix of an overloaded symbol.
* Not used after phase `typer'.
*/
@@ -2022,7 +2140,7 @@ A type's typeSymbol should never be inspected directly.
/** A class representing a type variable
* Not used after phase `typer'.
- * A higher-kinded type variable has type arguments (a list of Type's) and type paramers (list of Symbols)
+ * A higher-kinded type variable has type arguments (a list of Type's) and type parameters (list of Symbols)
* A TypeVar whose list of args is non-empty can only be instantiated by a higher-kinded type that can be applied to these args
* a typevar is much like a typeref, except it has special logic for type equality/subtyping
*/
@@ -2225,7 +2343,7 @@ A type's typeSymbol should never be inspected directly.
override def bounds: TypeBounds = {
val oftp = underlying.bounds
oftp match {
- case TypeBounds(lo, hi) if ((lo eq this) && (hi eq this)) => mkTypeBounds(this,this)
+ case TypeBounds(lo, hi) if ((lo eq this) && (hi eq this)) => TypeBounds(this,this)
case _ => oftp
}
}
@@ -2252,6 +2370,8 @@ A type's typeSymbol should never be inspected directly.
override def kind = "AnnotatedType"
}
+ object AnnotatedType extends AnnotatedTypeExtractor
+
/** A class representing types with a name. When an application uses
* named arguments, the named argument types for calling isApplicable
* are represented as NamedType.
@@ -2262,10 +2382,7 @@ A type's typeSymbol should never be inspected directly.
/** A class representing an as-yet unevaluated type.
*/
- abstract class LazyType extends Type {
- override def isComplete: Boolean = false
- override def complete(sym: Symbol)
- override def safeToString = "<?>"
+ abstract class LazyType extends Type with AbsLazyType {
override def kind = "LazyType"
}
@@ -2276,7 +2393,7 @@ A type's typeSymbol should never be inspected directly.
*/
private def rebind(pre: Type, sym: Symbol): Symbol = {
val owner = sym.owner
- if (owner.isClass && owner != pre.typeSymbol && !sym.isFinal && !sym.isClass) {
+ if (owner.isClass && owner != pre.typeSymbol && !sym.isEffectivelyFinal && !sym.isClass) {
//Console.println("rebind "+pre+" "+sym)//DEBUG
val rebind = pre.nonPrivateMember(sym.name).suchThat(sym => sym.isType || sym.isStable)
if (rebind == NoSymbol) sym
@@ -2292,25 +2409,18 @@ A type's typeSymbol should never be inspected directly.
*/
private def removeSuper(tp: Type, sym: Symbol): Type = tp match {
case SuperType(thistp, _) =>
- if (sym.isFinal || sym.isDeferred) thistp
+ if (sym.isEffectivelyFinal || sym.isDeferred) thistp
else tp
case _ =>
tp
}
- /** The canonical creator for this-types */
- def mkThisType(sym: Symbol): Type = {
- if (!phase.erasedTypes) unique(new ThisType(sym) with UniqueType)
- else if (sym.isImplClass) sym.typeOfThis
- else sym.tpe
- }
-
/** The canonical creator for single-types */
def singleType(pre: Type, sym: Symbol): Type = {
if (phase.erasedTypes)
sym.tpe.resultType
else if (sym.isRootPackage)
- mkThisType(RootClass)
+ ThisType(RootClass)
else {
var sym1 = rebind(pre, sym)
val pre1 = removeSuper(pre, sym1)
@@ -2319,34 +2429,13 @@ A type's typeSymbol should never be inspected directly.
}
}
- /** The canonical creator for super-types */
- def mkSuperType(thistp: Type, supertp: Type): Type =
- if (phase.erasedTypes) supertp
- else {
- unique(new SuperType(thistp, supertp) with UniqueType)
- }
-
- /** The canonical creator for type bounds */
- def mkTypeBounds(lo: Type, hi: Type): TypeBounds = {
- unique(new TypeBounds(lo, hi) with UniqueType)
- }
-
- def refinementOfClass(clazz: Symbol, parents: List[Type], decls: Scope) = {
- class RefinementOfClass extends RefinedType(parents, decls) {
- override def typeSymbol: Symbol = clazz
- }
- new RefinementOfClass
- }
-
-
-
/** the canonical creator for a refined type with a given scope */
def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos : Position): Type = {
if (phase.erasedTypes)
if (parents.isEmpty) ObjectClass.tpe else parents.head
else {
val clazz = owner.newRefinementClass(NoPosition)
- val result = refinementOfClass(clazz, parents, decls)
+ val result = RefinedType(parents, decls, clazz)
clazz.setInfo(result)
result
}
@@ -2376,17 +2465,6 @@ A type's typeSymbol should never be inspected directly.
result
}
- /** the canonical creator for a constant type */
- def mkConstantType(value: Constant): ConstantType = {
- class UniqueConstantType extends ConstantType(value) with UniqueType {
- /** Save the type of 'value'. For Java enums, it depends on finding the linked class,
- * which might not be found after 'flatten'. */
- private lazy val _tpe: Type = value.tpe
- override def underlying: Type = _tpe
- }
- unique(new UniqueConstantType)
- }
-
/** The canonical creator for typerefs
* todo: see how we can clean this up a bit
*/
@@ -2406,7 +2484,7 @@ A type's typeSymbol should never be inspected directly.
transform(sym1.info) // check there are no cycles
sym1.unlock()
*/
- rawTypeRef(pre, sym1, args) // don't expand type alias (cycles checked above)
+ TypeRef(pre, sym1, args) // don't expand type alias (cycles checked above)
} else {
val pre1 = removeSuper(pre, sym1)
if (pre1 ne pre) {
@@ -2417,31 +2495,20 @@ A type's typeSymbol should never be inspected directly.
// sharpen prefix so that it is maximal and still contains the class.
var p = pre.parents.reverse
while (!p.isEmpty && p.head.member(sym1.name) != sym1) p = p.tail
- if (p.isEmpty) rawTypeRef(pre, sym1, args)
+ if (p.isEmpty) TypeRef(pre, sym1, args)
else typeRef(p.head, sym1, args)
} else {
- rawTypeRef(pre, sym1, args)
+ TypeRef(pre, sym1, args)
}
}
}
- /** create a type-ref as found, without checks or rebinds */
- def rawTypeRef(pre: Type, sym: Symbol, args: List[Type]): Type = {
- class rawTypeRef extends TypeRef(pre, sym, args) with UniqueType
- unique(new rawTypeRef)
- }
-
- /** The canonical creator for implicit method types */
- def ImplicitMethodType(params: List[Symbol], resultType: Type): ImplicitMethodType =
- new ImplicitMethodType(params, resultType) // don't unique this!
-
/** The canonical creator for Java method types */
def JavaMethodType(params: List[Symbol], resultType: Type): JavaMethodType =
new JavaMethodType(params, resultType) // don't unique this!
- /** Create a new MethodType of the same class as tp, i.e. keep Java / ImplicitMethodType */
+ /** Create a new MethodType of the same class as tp, i.e. keep JavaMethodType */
def copyMethodType(tp: Type, params: List[Symbol], restpe: Type): Type = tp match {
- case _: ImplicitMethodType => ImplicitMethodType(params, restpe)
case _: JavaMethodType => JavaMethodType(params, restpe)
case _ => MethodType(params, restpe)
}
@@ -2493,7 +2560,7 @@ A type's typeSymbol should never be inspected directly.
case tv@TypeVar(_, constr) => tv.applyArgs(args)
case ErrorType => tycon
case WildcardType => tycon // needed for neg/t0226
- case _ => throw new Error(debugString(tycon))
+ case _ => abort(debugString(tycon))
}
/** A creator for type parameterizations
@@ -2516,7 +2583,7 @@ A type's typeSymbol should never be inspected directly.
* (minus any SingletonClass markers),
* type variables in `tparams' occurring in contravariant positions are replaced by lower bounds,
* provided the resulting type is legal wrt to stability, and does not contain any
- * type varianble in `tparams'.
+ * type variable in `tparams'.
* The abstraction drops all type parameters that are not directly or indirectly
* referenced by type `tpe1'.
* If there are no remaining type parameters, simply returns result type `tpe'.
@@ -2645,7 +2712,6 @@ A type's typeSymbol should never be inspected directly.
* as well as their instantiations.
*/
class TypeConstraint(lo0: List[Type], hi0: List[Type], numlo0: Type, numhi0: Type) {
- //var self: Type = _ //DEBUG
def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType)
def this() = this(List(), List())
@@ -2666,7 +2732,8 @@ A type's typeSymbol should never be inspected directly.
def addLoBound(tp: Type, numBound: Boolean = false) {
if (numBound && isNumericValueType(tp)) {
- if (!isNumericSubType(tp, numlo)) numlo = tp
+ if (numlo == NoType || isNumericSubType(numlo, tp)) numlo = tp
+ else if (!isNumericSubType(tp, numlo)) numlo = IntClass.tpe
} else {
lobounds = tp :: lobounds
}
@@ -2674,7 +2741,8 @@ A type's typeSymbol should never be inspected directly.
def addHiBound(tp: Type, numBound: Boolean = false) {
if (numBound && isNumericValueType(tp)) {
- if (!isNumericSubType(numhi, tp)) numhi = tp
+ if (numhi == NoType || isNumericSubType(tp, numhi)) numhi = tp
+ else if (!isNumericSubType(numhi, tp)) numhi = intersectionType(List(ByteClass.tpe, CharClass.tpe), ScalaPackageClass)
} else {
hibounds = tp :: hibounds
}
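Read as worked cases, with the numeric ordering being weak conformance (Byte <: Short <: Int <: Long <: Float <: Double, and Char <: Int):

    // addLoBound(Byte, numBound = true)   numlo: NoType -> Byte
    // addLoBound(Char, numBound = true)   Byte and Char are incomparable,
    //                                     so numlo widens to Int
    // addHiBound(Int,  numBound = true)   numhi: NoType -> Int
    // addHiBound(Char, numBound = true)   Char conforms to Int, numhi narrows to Char
    // addHiBound(Byte, numBound = true)   Byte and Char are incomparable, so numhi
    //                                     drops to the Byte-with-Char intersection above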
@@ -2771,14 +2839,14 @@ A type's typeSymbol should never be inspected directly.
val thistp1 = this(thistp)
val supertp1 = this(supertp)
if ((thistp1 eq thistp) && (supertp1 eq supertp)) tp
- else mkSuperType(thistp1, supertp1)
+ else SuperType(thistp1, supertp1)
case TypeBounds(lo, hi) =>
variance = -variance
val lo1 = this(lo)
variance = -variance
val hi1 = this(hi)
if ((lo1 eq lo) && (hi1 eq hi)) tp
- else mkTypeBounds(lo1, hi1)
+ else TypeBounds(lo1, hi1)
case BoundedWildcardType(bounds) =>
val bounds1 = this(bounds)
if (bounds1 eq bounds) tp
@@ -2964,7 +3032,7 @@ A type's typeSymbol should never be inspected directly.
// note: it's important to write the two tests in this order,
// as only typeParams forces the classfile to be read. See #400
private def isRawIfWithoutArgs(sym: Symbol) =
- !sym.typeParams.isEmpty && sym.hasFlag(JAVA)
+ sym.isClass && !sym.typeParams.isEmpty && sym.hasFlag(JAVA)
def isRaw(sym: Symbol, args: List[Type]) =
!phase.erasedTypes && isRawIfWithoutArgs(sym) && args.isEmpty
@@ -2994,7 +3062,7 @@ A type's typeSymbol should never be inspected directly.
}
def singletonBounds(hi: Type) = {
- mkTypeBounds(NothingClass.tpe, intersectionType(List(hi, SingletonClass.tpe)))
+ TypeBounds(NothingClass.tpe, intersectionType(List(hi, SingletonClass.tpe)))
}
/** A map to compute the asSeenFrom method */
@@ -3034,17 +3102,13 @@ A type's typeSymbol should never be inspected directly.
var capturedPre = emptySymMap
- def stabilize(pre: Type, clazz: Symbol): Type = {
- capturedPre get clazz match {
- case Some(qvar) =>
- qvar
- case _ =>
+ def stabilize(pre: Type, clazz: Symbol): Type =
+ capturedPre.getOrElse(clazz, {
val qvar = clazz freshExistential ".type" setInfo singletonBounds(pre)
capturedPre += (clazz -> qvar)
capturedParams = qvar :: capturedParams
qvar
- }
- }.tpe
+ }).tpe
/** Return pre.baseType(clazz), or if that's NoType and clazz is a refinement, pre itself.
* See bug397.scala for an example where the second alternative is needed.
@@ -3094,9 +3158,7 @@ A type's typeSymbol should never be inspected directly.
if ((pre eq NoType) || (pre eq NoPrefix) || !clazz.isClass) mapOver(tp)
//@M! see test pos/tcpoly_return_overriding.scala why mapOver is necessary
else {
- def throwError : Nothing = throw new Error(
- "" + tp + sym.locationString + " cannot be instantiated from " + pre.widen
- )
+ def throwError = abort("" + tp + sym.locationString + " cannot be instantiated from " + pre.widen)
def instParam(ps: List[Symbol], as: List[Type]): Type =
if (ps.isEmpty) throwError
@@ -3105,7 +3167,7 @@ A type's typeSymbol should never be inspected directly.
appliedType(as.head, args mapConserve (this)) // @M: was as.head
else instParam(ps.tail, as.tail);
val symclazz = sym.owner
- if (symclazz == clazz && (pre.widen.typeSymbol isNonBottomSubClass symclazz)) {
+ if (symclazz == clazz && !pre.isInstanceOf[TypeVar] && (pre.widen.typeSymbol isNonBottomSubClass symclazz)) {
pre.baseType(symclazz) match {
case TypeRef(_, basesym, baseargs) =>
//Console.println("instantiating " + sym + " from " + basesym + " with " + basesym.typeParams + " and " + baseargs+", pre = "+pre+", symclazz = "+symclazz);//DEBUG
@@ -3141,12 +3203,13 @@ A type's typeSymbol should never be inspected directly.
protected def matches(sym: Symbol, sym1: Symbol): Boolean = sym eq sym1
/** Map target to type, can be tuned by subclasses */
- protected def toType(fromtp: Type, t: T): Type
+ protected def toType(fromtp: Type, tp: T): Type
- def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type =
- if (from.isEmpty) tp
- else if (matches(from.head, sym)) toType(tp, to.head)
- else subst(tp, sym, from.tail, to.tail)
+ def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type =
+ if (from.isEmpty) tp
+ // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(tp, from))
+ else if (matches(from.head, sym)) toType(tp, to.head)
+ else subst(tp, sym, from.tail, to.tail)
private def renameBoundSyms(tp: Type): Type = tp match {
case MethodType(ps, restp) =>
@@ -3200,6 +3263,7 @@ A type's typeSymbol should never be inspected directly.
override def apply(tp: Type): Type = if (from.isEmpty) tp else {
def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol =
if (from.isEmpty) sym
+ // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(sym, from))
else if (matches(from.head, sym)) to.head
else subst(sym, from.tail, to.tail)
tp match {
@@ -3251,7 +3315,6 @@ A type's typeSymbol should never be inspected directly.
extends SubstMap(from, to) {
protected def toType(fromtp: Type, tp: Type) = tp
-
override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
object trans extends TypeMapTransformer {
override def transform(tree: Tree) =
@@ -3303,7 +3366,7 @@ A type's typeSymbol should never be inspected directly.
override val dropNonConstraintAnnotations = true
private var existSyms = immutable.Map.empty[Int, Symbol]
- def existentialsNeeded: List[Symbol] = existSyms.valuesIterator.toList
+ def existentialsNeeded: List[Symbol] = existSyms.values.toList
/* Return the type symbol for referencing a parameter index
* inside the existential quantifier. */
@@ -3512,9 +3575,9 @@ A type's typeSymbol should never be inspected directly.
}
}
- class MissingAliasException extends Exception
- val missingAliasException = new MissingAliasException
- class MissingTypeException extends Exception
+ class MissingAliasControl extends ControlThrowable
+ val missingAliasException = new MissingAliasControl
+ class MissingTypeControl extends ControlThrowable
object adaptToNewRunMap extends TypeMap {
private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = {
@@ -3526,7 +3589,7 @@ A type's typeSymbol should never be inspected directly.
var rebind0 = pre.findMember(sym.name, BRIDGE, 0, true)
if (rebind0 == NoSymbol) {
if (sym.isAliasType) throw missingAliasException
- throw new MissingTypeException // For build manager purposes
+ throw new MissingTypeControl // For build manager purposes
//assert(false, pre+"."+sym+" does no longer exist, phase = "+phase)
}
/** The two symbols have the same fully qualified name */
@@ -3552,7 +3615,7 @@ A type's typeSymbol should never be inspected directly.
def apply(tp: Type): Type = tp match {
case ThisType(sym) if (sym.isModuleClass) =>
val sym1 = adaptToNewRun(sym.owner.thisType, sym)
- if (sym1 == sym) tp else mkThisType(sym1)
+ if (sym1 == sym) tp else ThisType(sym1)
case SingleType(pre, sym) =>
if (sym.isPackage) tp
else {
@@ -3571,9 +3634,9 @@ A type's typeSymbol should never be inspected directly.
if ((pre1 eq pre) && (sym1 eq sym) && (args1 eq args)/* && sym.isExternal*/) tp
else typeRef(pre1, sym1, args1)
} catch {
- case ex: MissingAliasException =>
+ case ex: MissingAliasControl =>
apply(tp.dealias)
- case _: MissingTypeException =>
+ case _: MissingTypeControl =>
NoType
}
}
@@ -3660,7 +3723,7 @@ A type's typeSymbol should never be inspected directly.
* let bt1, bt2 be the base types of tp1, tp2 relative to class bc
* Then:
* bt1 and bt2 have the same prefix, and
- * any correspondiong non-variant type arguments of bt1 and bt2 are the same
+ * any corresponding non-variant type arguments of bt1 and bt2 are the same
*/
def isPopulated(tp1: Type, tp2: Type): Boolean = {
def isConsistent(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
@@ -3846,18 +3909,14 @@ A type's typeSymbol should never be inspected directly.
// new dependent types: probably fix this, use substSym as done for PolyType
(isSameTypes(tp1.paramTypes, tp2.paramTypes) &&
res1 =:= res2 &&
- tp1.isInstanceOf[ImplicitMethodType] == tp2.isInstanceOf[ImplicitMethodType])
+ tp1.isImplicit == tp2.isImplicit)
case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
-// assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
- (tparams1.length == tparams2.length &&
- (tparams1, tparams2).zipped.forall
- ((p1, p2) => p1.info =:= p2.info.substSym(tparams2, tparams1)) && //@M looks like it might suffer from same problem as #2210
- res1 =:= res2.substSym(tparams2, tparams1))
+ // assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
+ (tparams1.length == tparams2.length) && (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && // @M looks like it might suffer from same problem as #2210
+ res1 =:= res2.substSym(tparams2, tparams1)
case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) =>
- (tparams1.length == tparams2.length &&
- (tparams1, tparams2).zipped.forall
- ((p1, p2) => p1.info =:= p2.info.substSym(tparams2, tparams1)) && //@M looks like it might suffer from same problem as #2210
- res1 =:= res2.substSym(tparams2, tparams1))
+ (tparams1.length == tparams2.length) && (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && // @M looks like it might suffer from same problem as #2210
+ res1 =:= res2.substSym(tparams2, tparams1)
case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) =>
lo1 =:= lo2 && hi1 =:= hi2
case (BoundedWildcardType(bounds), _) =>
@@ -3971,26 +4030,30 @@ A type's typeSymbol should never be inspected directly.
// new dependent types: probably fix this, use substSym as done for PolyType
return isSameTypes(mt1.paramTypes, mt2.paramTypes) &&
mt1.resultType =:= mt2.resultType &&
- tp1.isInstanceOf[ImplicitMethodType] == tp2.isInstanceOf[ImplicitMethodType]
+ mt1.isImplicit == mt2.isImplicit
case _ =>
}
case PolyType(tparams1, res1) =>
tp2 match {
case PolyType(tparams2, res2) =>
// assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
- return tparams1.length == tparams2.length &&
- (tparams1, tparams2).zipped.forall((p1, p2) =>
- p1.info =:= p2.info.substSym(tparams2, tparams1)) && //@M looks like it might suffer from same problem as #2210
- res1 =:= res2.substSym(tparams2, tparams1)
+ // @M looks like it might suffer from same problem as #2210
+ return (
+ (tparams1.length == tparams2.length) && // corresponds does not check length of two sequences before checking the predicate
+ (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
+ res1 =:= res2.substSym(tparams2, tparams1)
+ )
case _ =>
}
case ExistentialType(tparams1, res1) =>
tp2 match {
case ExistentialType(tparams2, res2) =>
- return (tparams1.length == tparams2.length &&
- (tparams1, tparams2).zipped.forall
- ((p1, p2) => p1.info =:= p2.info.substSym(tparams2, tparams1)) && //@M looks like it might suffer from same problem as #2210
- res1 =:= res2.substSym(tparams2, tparams1))
+ // @M looks like it might suffer from same problem as #2210
+ return (
+ (tparams1.length == tparams2.length) && // corresponds does not check length of two sequences before checking the predicate -- faster & needed to avoid crasher in #2956
+ (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
+ res1 =:= res2.substSym(tparams2, tparams1)
+ )
case _ =>
}
case TypeBounds(lo1, hi1) =>
@@ -4054,12 +4117,11 @@ A type's typeSymbol should never be inspected directly.
/** Are `tps1' and `tps2' lists of pairwise equivalent
* types?
*/
- def isSameTypes(tps1: List[Type], tps2: List[Type]): Boolean =
- tps1.length == tps2.length && ((tps1, tps2).zipped forall (_ =:= _))
+ def isSameTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ =:= _)
- private var pendingSubTypes = new collection.mutable.HashSet[SubTypePair]
+ private val pendingSubTypes = new collection.mutable.HashSet[SubTypePair]
private var basetypeRecursions: Int = 0
- private var pendingBaseTypes = new collection.mutable.HashSet[Type]
+ private val pendingBaseTypes = new collection.mutable.HashSet[Type]
def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, AnyDepth)
@@ -4074,12 +4136,12 @@ A type's typeSymbol should never be inspected directly.
else
try {
pendingSubTypes += p
- isSubType0(tp1, tp2, depth)
+ isSubType2(tp1, tp2, depth)
} finally {
pendingSubTypes -= p
}
} else {
- isSubType0(tp1, tp2, depth)
+ isSubType2(tp1, tp2, depth)
}
}
} finally {
@@ -4135,18 +4197,17 @@ A type's typeSymbol should never be inspected directly.
((tp1.normalize, tp2.normalize) match {
case (PolyType(tparams1, res1), PolyType(tparams2, res2)) => // @assume tp1.isHigherKinded && tp2.isHigherKinded (as they were both normalized to PolyType)
tparams1.length == tparams2.length && {
- if(tparams1.isEmpty) res1 <:< res2 // fast-path: monomorphic nullary method type
- else if(tparams1.head.owner.isMethod) { // fast-path: polymorphic method type -- type params cannot be captured
- ((tparams1, tparams2).zipped forall ((p1, p2) =>
- p2.info.substSym(tparams2, tparams1) <:< p1.info)) &&
+ if (tparams1.isEmpty) res1 <:< res2 // fast-path: monomorphic nullary method type
+ else if (tparams1.head.owner.isMethod) { // fast-path: polymorphic method type -- type params cannot be captured
+ (tparams1 corresponds tparams2)((p1, p2) => p2.info.substSym(tparams2, tparams1) <:< p1.info) &&
res1 <:< res2.substSym(tparams2, tparams1)
} else { // normalized higher-kinded type
//@M for an example of why we need to generate fresh symbols, see neg/tcpoly_ticket2101.scala
val tpsFresh = cloneSymbols(tparams1) // @M cloneSymbols(tparams2) should be equivalent -- TODO: check
- ((tparams1, tparams2).zipped forall ((p1, p2) =>
- p2.info.substSym(tparams2, tpsFresh) <:< p1.info.substSym(tparams1, tpsFresh))) &&
- res1.substSym(tparams1, tpsFresh) <:< res2.substSym(tparams2, tpsFresh)
+ (tparams1 corresponds tparams2)((p1, p2) =>
+ p2.info.substSym(tparams2, tpsFresh) <:< p1.info.substSym(tparams1, tpsFresh)) && // @PP: corresponds
+ res1.substSym(tparams1, tpsFresh) <:< res2.substSym(tparams2, tpsFresh)
//@M the forall in the previous test could be optimised to the following,
// but not worth the extra complexity since it only shaves 1s from quick.comp
@@ -4171,10 +4232,6 @@ A type's typeSymbol should never be inspected directly.
isSubArgs(tps1.tail, tps2.tail, tparams.tail)
)
- def isSubType0(tp1: Type, tp2: Type, depth: Int): Boolean = {
- isSubType2(tp1, tp2, depth)
- }
-
def differentOrNone(tp1: Type, tp2: Type) = if (tp1 eq tp2) NoType else tp1
/** Does type `tp1' conform to `tp2'?
@@ -4196,7 +4253,7 @@ A type's typeSymbol should never be inspected directly.
* - bind TypeVars on the right, if lhs is not Annotated nor BoundedWildcard
* - handle common cases for first-kind TypeRefs on both sides as a fast path.
*/
- def firstTry = tp2 match {
+ def firstTry = { incCounter(ctr1); tp2 match {
// fast path: two typerefs, none of them HK
case tr2: TypeRef =>
tp1 match {
@@ -4231,14 +4288,14 @@ A type's typeSymbol should never be inspected directly.
}
case _ =>
secondTry
- }
+ }}
/** Second try, on the left:
* - unwrap AnnotatedTypes, BoundedWildcardTypes,
* - bind typevars,
* - handle existential types by skolemization.
*/
- def secondTry = tp1 match {
+ def secondTry = { incCounter(ctr2); tp1 match {
case AnnotatedType(_, _, _) =>
tp1.withoutAnnotations <:< tp2.withoutAnnotations && annotationsConform(tp1, tp2)
case BoundedWildcardType(bounds) =>
@@ -4254,26 +4311,32 @@ A type's typeSymbol should never be inspected directly.
}
case _ =>
thirdTry
- }
+ }}
def thirdTryRef(tp1: Type, tp2: TypeRef): Boolean = {
+ incCounter(ctr3);
val sym2 = tp2.sym
- if (sym2.isAliasType) {
- isSubType(tp1.normalize, tp2.normalize, depth)
- } else if (sym2.isAbstractType) {
- val tp2a = tp2.bounds.lo
-// isDifferentTypeConstructor(tp2a, tp2.pre, sym2) && tp1 <:< tp2a || fourthTry
- isDifferentTypeConstructor(tp2, tp2a) && tp1 <:< tp2a || fourthTry
- } else if (sym2 == NotNullClass) {
- tp1.isNotNull
- } else if (sym2 == SingletonClass) {
- tp1.isStable
- } else if (isRaw(sym2, tp2.args)) {
- isSubType(tp1, rawToExistential(tp2), depth)
- } else if (sym2.isRefinementClass) {
- isSubType(tp1, sym2.info, depth)
- } else {
- fourthTry
+ sym2 match {
+ case _: ClassSymbol =>
+ if (sym2 == NotNullClass)
+ tp1.isNotNull
+ else if (sym2 == SingletonClass)
+ tp1.isStable
+ else if (isRaw(sym2, tp2.args))
+ isSubType(tp1, rawToExistential(tp2), depth)
+ else if (sym2.name == nme.REFINE_CLASS_NAME.toTypeName)
+ isSubType(tp1, sym2.info, depth)
+ else
+ fourthTry
+ case _: TypeSymbol =>
+ if (sym2 hasFlag DEFERRED) {
+ val tp2a = tp2.bounds.lo
+ isDifferentTypeConstructor(tp2, tp2a) && tp1 <:< tp2a || fourthTry
+ } else {
+ isSubType(tp1.normalize, tp2.normalize, depth)
+ }
+ case _ =>
+ fourthTry
}
}
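The restructured dispatch reads as a small case analysis on the right-hand side (source-level view of the branches above):

    // tp1 <:< Singleton       -- true iff tp1.isStable (e.g. x.type, this.type)
    // tp1 <:< NotNull         -- true iff tp1.isNotNull
    // tp1 <:< A               -- A a deferred type member with lower bound Lo:
    //                            try tp1 <:< Lo, otherwise fall through to fourthTry
    // tp1 <:< SomeAlias[...]  -- non-deferred type symbols (aliases) are normalized
    //                            away and the test is retried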
@@ -4282,7 +4345,7 @@ A type's typeSymbol should never be inspected directly.
* - handle typerefs, existentials, and notnull types.
* - handle left+right method types, polytypes, typebounds
*/
- def thirdTry = tp2 match {
+ def thirdTry = { incCounter(ctr3); tp2 match {
case tr2: TypeRef =>
thirdTryRef(tp1, tr2)
case rt2: RefinedType =>
@@ -4294,13 +4357,13 @@ A type's typeSymbol should never be inspected directly.
tp1.isNotNull && tp1 <:< nn2.underlying
case mt2: MethodType =>
tp1 match {
- case MethodType(params1, res1) =>
+ case mt1 @ MethodType(params1, res1) =>
val params2 = mt2.params
val res2 = mt2.resultType
(params1.length == params2.length &&
- matchingParams(params1, params2, tp1.isInstanceOf[JavaMethodType], tp2.isInstanceOf[JavaMethodType]) &&
+ matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
(res1 <:< res2) &&
- tp1.isInstanceOf[ImplicitMethodType] == tp2.isInstanceOf[ImplicitMethodType])
+ mt1.isImplicit == mt2.isImplicit)
case _ =>
false
}
@@ -4321,34 +4384,39 @@ A type's typeSymbol should never be inspected directly.
}
case _ =>
fourthTry
- }
+ }}
/** Fourth try, on the left:
* - handle typerefs, refined types, notnull and singleton types.
*/
- def fourthTry = tp1 match {
+ def fourthTry = { incCounter(ctr4); tp1 match {
case tr1 @ TypeRef(_, sym1, _) =>
- if (sym1.isAliasType) {
- isSubType(tp1.normalize, tp2.normalize, depth)
- } else if (sym1.isAbstractType) {
- val tp1a = tp1.bounds.hi
- isDifferentTypeConstructor(tp1, tp1a) && tp1a <:< tp2
- } else if (sym1 == NothingClass) {
- true
- } else if (sym1 == NullClass) {
- tp2 match {
- case TypeRef(_, sym2, _) =>
- sym2.isClass && (sym2 isNonBottomSubClass ObjectClass) &&
- !(tp2.normalize.typeSymbol isNonBottomSubClass NotNullClass)
- case _ =>
- isSingleType(tp2) && tp1 <:< tp2.widen
- }
- } else if (isRaw(sym1, tr1.args)) {
- isSubType(rawToExistential(tp1), tp2, depth)
- } else if (sym1.isRefinementClass) {
- isSubType(sym1.info, tp2, depth)
- } else {
- false
+ sym1 match {
+ case _: ClassSymbol =>
+ if (sym1 == NothingClass)
+ true
+ else if (sym1 == NullClass)
+ tp2 match {
+ case TypeRef(_, sym2, _) =>
+ sym2.isClass && (sym2 isNonBottomSubClass ObjectClass) &&
+ !(tp2.normalize.typeSymbol isNonBottomSubClass NotNullClass)
+ case _ =>
+ isSingleType(tp2) && tp1 <:< tp2.widen
+ }
+ else if (isRaw(sym1, tr1.args))
+ isSubType(rawToExistential(tp1), tp2, depth)
+ else
+ sym1.name == nme.REFINE_CLASS_NAME.toTypeName &&
+ isSubType(sym1.info, tp2, depth)
+ case _: TypeSymbol =>
+ if (sym1 hasFlag DEFERRED) {
+ val tp1a = tp1.bounds.hi
+ isDifferentTypeConstructor(tp1, tp1a) && tp1a <:< tp2
+ } else {
+ isSubType(tp1.normalize, tp2.normalize, depth)
+ }
+ case _ =>
+ false
}
case RefinedType(parents1, _) =>
parents1 exists (_ <:< tp2)
@@ -4356,7 +4424,7 @@ A type's typeSymbol should never be inspected directly.
tp1.underlying <:< tp2
case _ =>
false
- }
+ }}
firstTry
}
@@ -4365,8 +4433,7 @@ A type's typeSymbol should never be inspected directly.
* that all elements of `tps1' conform to corresponding elements
* of `tps2'?
*/
- def isSubTypes(tps1: List[Type], tps2: List[Type]): Boolean =
- tps1.length == tps2.length && ((tps1, tps2).zipped forall (_ <:< _))
+ def isSubTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ <:< _)
/** Does type `tp' implement symbol `sym' with same or
* stronger type? Exact only if `sym' is a member of some
@@ -4407,13 +4474,13 @@ A type's typeSymbol should never be inspected directly.
alwaysMatchSimple || tp1 =:= tp2
}
tp1 match {
- case MethodType(params1, res1) =>
+ case mt1 @ MethodType(params1, res1) =>
tp2 match {
- case MethodType(params2, res2) =>
- params1.length == params2.length && // useful pre-secreening optimization
- matchingParams(params1, params2, tp1.isInstanceOf[JavaMethodType], tp2.isInstanceOf[JavaMethodType]) &&
+ case mt2 @ MethodType(params2, res2) =>
+ params1.length == params2.length && // useful pre-screening optimization
+ matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
matchesType(res1, res2, alwaysMatchSimple) &&
- tp1.isInstanceOf[ImplicitMethodType] == tp2.isInstanceOf[ImplicitMethodType]
+ mt1.isImplicit == mt2.isImplicit
case PolyType(List(), res2) =>
if (params1.isEmpty) matchesType(res1, res2, alwaysMatchSimple)
else matchesType(tp1, res2, alwaysMatchSimple)
@@ -4457,7 +4524,7 @@ A type's typeSymbol should never be inspected directly.
params1.length == params2.length && // useful pre-screening optimization
matchingParams(params1, params2, tp1.isInstanceOf[JavaMethodType], tp2.isInstanceOf[JavaMethodType]) &&
matchesType(res1, res2, alwaysMatchSimple) &&
- tp1.isInstanceOf[ImplicitMethodType] == tp2.isInstanceOf[ImplicitMethodType]
+ tp1.isImplicit == tp2.isImplicit
case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
matchesQuantified(tparams1, tparams2, res1, res2)
case (PolyType(List(), rtp1), MethodType(List(), rtp2)) =>
@@ -4598,8 +4665,10 @@ A type's typeSymbol should never be inspected directly.
* @return ...
*/
def isWithinBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): Boolean = {
- val bounds = instantiatedBounds(pre, owner, tparams, targs)
- (bounds, targs).zipped forall (_ containsType _)
+ var bounds = instantiatedBounds(pre, owner, tparams, targs)
+ if (targs.exists(_.annotations.nonEmpty))
+ bounds = adaptBoundsToAnnotations(bounds, tparams, targs)
+ (bounds corresponds targs)(_ containsType _) // @PP: corresponds
}
def instantiatedBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): List[TypeBounds] =
@@ -4701,7 +4770,7 @@ A type's typeSymbol should never be inspected directly.
res
case TypeVar(_, constr) =>
if (constr.instValid) constr.inst
- else throw new Error("trying to do lub/glb of typevar "+tp)
+ else abort("trying to do lub/glb of typevar "+tp)
case t => t
}
val strippedTypes = ts mapConserve (stripType)
@@ -4709,25 +4778,39 @@ A type's typeSymbol should never be inspected directly.
}
def weakLub(ts: List[Type]) =
- if (ts.nonEmpty && (ts forall isNumericValueType)) numericLub(ts)
- else lub(ts)
-
- def weakGlb(ts: List[Type]) =
- if (ts.nonEmpty && (ts forall isNumericValueType)) numericGlb(ts)
- else glb(ts)
+ if (ts.nonEmpty && (ts forall isNumericValueType)) (numericLub(ts), true)
+ else if (ts.nonEmpty && (ts exists (_.annotations.nonEmpty)))
+ (annotationsLub(lub(ts map (_.withoutAnnotations)), ts), true)
+ else (lub(ts), false)
+
+ def weakGlb(ts: List[Type]) = {
+ if (ts.nonEmpty && (ts forall isNumericValueType)) {
+ val nglb = numericGlb(ts)
+ if (nglb != NoType) (nglb, true)
+ else (glb(ts), false)
+ } else if (ts.nonEmpty && (ts exists (_.annotations.nonEmpty))) {
+ (annotationsGlb(glb(ts map (_.withoutAnnotations)), ts), true)
+ } else (glb(ts), false)
+ }
def numericLub(ts: List[Type]) =
- (ByteClass.tpe /: ts) ((t1, t2) => if (isNumericSubType(t1, t2)) t2 else t1)
+ ts reduceLeft ((t1, t2) =>
+ if (isNumericSubType(t1, t2)) t2
+ else if (isNumericSubType(t2, t1)) t1
+ else IntClass.tpe)
def numericGlb(ts: List[Type]) =
- (DoubleClass.tpe /: ts) ((t1, t2) => if (isNumericSubType(t1, t2)) t1 else t2)
+ ts reduceLeft ((t1, t2) =>
+ if (isNumericSubType(t1, t2)) t1
+ else if (isNumericSubType(t2, t1)) t2
+ else NoType)
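A tiny executable model of the two folds, over plain strings standing in for the numeric value classes (the widening relation below encodes weak conformance; names are illustrative, not compiler API):

    object NumericFoldModel {
      private val widens: Map[String, Set[String]] = Map(
        "Byte"   -> Set("Short", "Int", "Long", "Float", "Double"),
        "Short"  -> Set("Int", "Long", "Float", "Double"),
        "Char"   -> Set("Int", "Long", "Float", "Double"),
        "Int"    -> Set("Long", "Float", "Double"),
        "Long"   -> Set("Float", "Double"),
        "Float"  -> Set("Double"),
        "Double" -> Set.empty[String]
      )
      private def sub(a: String, b: String) = a == b || widens.getOrElse(a, Set.empty[String])(b)

      // mirrors numericLub: keep the wider type, widen to Int when incomparable
      def numericLub(ts: List[String]): String =
        ts reduceLeft ((t1, t2) => if (sub(t1, t2)) t2 else if (sub(t2, t1)) t1 else "Int")

      // mirrors numericGlb: keep the narrower type, "NoType" when incomparable
      def numericGlb(ts: List[String]): String =
        ts reduceLeft ((t1, t2) => if (sub(t1, t2)) t1 else if (sub(t2, t1)) t2 else "NoType")
    }
    // numericLub(List("Byte", "Short")) == "Short"
    // numericLub(List("Byte", "Char"))  == "Int"      -- incomparable, widened
    // numericGlb(List("Byte", "Char"))  == "NoType"   -- weakGlb then falls back to glb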
def isWeakSubType(tp1: Type, tp2: Type) =
tp1.deconst.normalize match {
case TypeRef(_, sym1, _) if isNumericValueClass(sym1) =>
tp2.deconst.normalize match {
case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
- sym1 == sym2 || numericWidth(sym1) < numericWidth(sym2)
+ isNumericSubClass(sym1, sym2)
case tv2 @ TypeVar(_, _) =>
tv2.registerBound(tp1, isLowerBound = true, numBound = true)
case _ =>
@@ -4746,7 +4829,7 @@ A type's typeSymbol should never be inspected directly.
def isNumericSubType(tp1: Type, tp2: Type) =
isNumericValueType(tp1) && isNumericValueType(tp2) &&
- (tp1.typeSymbol == tp2.typeSymbol || numericWidth(tp1.typeSymbol) < numericWidth(tp2.typeSymbol))
+ isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol)
def lub(ts: List[Type]): Type = lub(ts, lubDepth(ts))
@@ -4756,14 +4839,13 @@ A type's typeSymbol should never be inspected directly.
case List() => NothingClass.tpe
case List(t) => t
case ts @ PolyType(tparams, _) :: _ =>
- PolyType(
- (tparams, matchingBounds(ts, tparams).transpose).zipped map
- ((tparam, bounds) => tparam.cloneSymbol.setInfo(glb(bounds, depth))),
- lub0(matchingInstTypes(ts, tparams)))
+ val tparams1 = (tparams, matchingBounds(ts, tparams).transpose).zipped map
+ ((tparam, bounds) => tparam.cloneSymbol.setInfo(glb(bounds, depth)))
+ PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1)))
case ts @ MethodType(params, _) :: rest =>
MethodType(params, lub0(matchingRestypes(ts, params map (_.tpe))))
case ts @ TypeBounds(_, _) :: rest =>
- mkTypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth))
+ TypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth))
case ts0 =>
val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
val bts: List[BaseTypeSeq] = ts map (_.baseTypeSeq)
@@ -4787,14 +4869,14 @@ A type's typeSymbol should never be inspected directly.
val symtypes =
(narrowts, syms).zipped map ((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType))
if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class
- proto.cloneSymbol(lubRefined.typeSymbol).setInfo(lub(symtypes, decr(depth)))
+ proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, decr(depth)))
else if (symtypes.tail forall (symtypes.head =:=))
- proto.cloneSymbol(lubRefined.typeSymbol).setInfo(symtypes.head)
+ proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(symtypes.head)
else {
def lubBounds(bnds: List[TypeBounds]): TypeBounds =
- mkTypeBounds(glb(bnds map (_.lo), decr(depth)), lub(bnds map (_.hi), decr(depth)))
+ TypeBounds(glb(bnds map (_.lo), decr(depth)), lub(bnds map (_.hi), decr(depth)))
lubRefined.typeSymbol.newAbstractType(proto.pos, proto.name)
- .setInfo(lubBounds(symtypes map (_.bounds)))
+ .setInfoOwnerAdjusted(lubBounds(symtypes map (_.bounds)))
}
}
}
@@ -4840,7 +4922,7 @@ A type's typeSymbol should never be inspected directly.
val GlbFailure = new Throwable
/** A global counter for glb calls in the `specializes' query connected to the `addMembers'
- * call in `glb'. There's a possible inifinite recursion when `specializes' calls
+ * call in `glb'. There's a possible infinite recursion when `specializes' calls
* memberType, which calls baseTypeSeq, which calls mergePrefixAndArgs, which calls glb.
* The counter breaks this recursion after two calls.
* If the recursion is broken, no member is added to the glb.
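The comment above describes a counter that breaks the glb, specializes, memberType, baseTypeSeq, mergePrefixAndArgs cycle after two nested calls, at the cost of not adding the member. A minimal sketch of that guard follows; every name in it is invented, and it shows only the counter idea, not the compiler's actual bookkeeping:

    // invented names; illustrates the recursion breaker only, not the real glb implementation
    object RecursionBreaker {
      private var nesting = 0
      def guarded[A](fallback: => A)(body: => A): A =
        if (nesting >= 2) fallback          // recursion broken: skip the expensive path
        else {
          nesting += 1
          try body finally nesting -= 1
        }
    }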
@@ -4856,14 +4938,13 @@ A type's typeSymbol should never be inspected directly.
case List() => AnyClass.tpe
case List(t) => t
case ts @ PolyType(tparams, _) :: _ =>
- PolyType(
- (tparams, matchingBounds(ts, tparams).transpose).zipped map
- ((tparam, bounds) => tparam.cloneSymbol.setInfo(lub(bounds, depth))),
- glb0(matchingInstTypes(ts, tparams)))
+ val tparams1 = (tparams, matchingBounds(ts, tparams).transpose).zipped map
+ ((tparam, bounds) => tparam.cloneSymbol.setInfo(lub(bounds, depth)))
+ PolyType(tparams1, glb0(matchingInstTypes(ts, tparams1)))
case ts @ MethodType(params, _) :: rest =>
MethodType(params, glb0(matchingRestypes(ts, params map (_.tpe))))
case ts @ TypeBounds(_, _) :: rest =>
- mkTypeBounds(lub(ts map (_.bounds.lo), depth), glb(ts map (_.bounds.hi), depth))
+ TypeBounds(lub(ts map (_.bounds.lo), depth), glb(ts map (_.bounds.hi), depth))
case ts0 =>
try {
val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
@@ -4893,7 +4974,7 @@ A type's typeSymbol should never be inspected directly.
) yield alt
val symtypes = syms map glbThisType.memberInfo
assert(!symtypes.isEmpty)
- proto.cloneSymbol(glbRefined.typeSymbol).setInfo(
+ proto.cloneSymbol(glbRefined.typeSymbol).setInfoOwnerAdjusted(
if (proto.isTerm) glb(symtypes, decr(depth))
else {
def isTypeBound(tp: Type) = tp match {
@@ -4903,13 +4984,13 @@ A type's typeSymbol should never be inspected directly.
def glbBounds(bnds: List[Type]): TypeBounds = {
val lo = lub(bnds map (_.bounds.lo), decr(depth))
val hi = glb(bnds map (_.bounds.hi), decr(depth))
- if (lo <:< hi) mkTypeBounds(lo, hi)
+ if (lo <:< hi) TypeBounds(lo, hi)
else throw GlbFailure
}
val symbounds = symtypes filter isTypeBound
var result: Type =
if (symbounds.isEmpty)
- mkTypeBounds(NothingClass.tpe, AnyClass.tpe)
+ TypeBounds(NothingClass.tpe, AnyClass.tpe)
else glbBounds(symbounds)
for (t <- symtypes if !isTypeBound(t))
if (result.bounds containsType t) result = t
@@ -4970,7 +5051,7 @@ A type's typeSymbol should never be inspected directly.
* of types `tps'. All types in `tps' are typerefs or singletypes
* with the same symbol.
* Return `Some(x)' if the computation succeeds with result `x'.
- * Return `None' if the computuation fails.
+ * Return `None' if the computation fails.
*/
def mergePrefixAndArgs(tps: List[Type], variance: Int, depth: Int): Option[Type] = tps match {
case List(tp) =>
@@ -4996,7 +5077,7 @@ A type's typeSymbol should never be inspected directly.
else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
// just err on the conservative side, i.e. with a bound that is too high.
// if(!(tparam.info.bounds contains tparam)){ //@M can't deal with f-bounds, see #2251
- val qvar = commonOwner(as) freshExistential "" setInfo mkTypeBounds(g, l)
+ val qvar = commonOwner(as) freshExistential "" setInfo TypeBounds(g, l)
capturedParams += qvar
qvar.tpe
}
@@ -5076,15 +5157,15 @@ A type's typeSymbol should never be inspected directly.
// Errors and Diagnostics -----------------------------------------------------
- /** An exception signalling a type error */
- class TypeError(var pos: Position, val msg: String) extends java.lang.Error(msg) {
+ /** A throwable signalling a type error */
+ class TypeError(var pos: Position, val msg: String) extends Throwable(msg) with ControlThrowable {
def this(msg: String) = this(NoPosition, msg)
}
- class NoCommonType(tps: List[Type]) extends java.lang.Error(
- "lub/glb of incompatible types: " + tps.mkString("", " and ", ""))
+ class NoCommonType(tps: List[Type]) extends Throwable(
+ "lub/glb of incompatible types: " + tps.mkString("", " and ", "")) with ControlThrowable
- /** An exception signalling a malformed type */
+ /** A throwable signalling a malformed type */
class MalformedType(msg: String) extends TypeError(msg) {
def this(pre: Type, tp: String) = this("malformed type: " + pre + "#" + tp)
}
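The weakLub/weakGlb rewrite above now returns the computed type together with a Boolean saying whether a numeric or annotation-aware computation was used, and numericLub/numericGlb become pairwise reductions over the numeric conformance check instead of folds starting from ByteClass/DoubleClass. A standalone sketch of the lub reduction, with strings and an illustrative width table standing in for the compiler's types; isNumericSub here is only a stand-in for isNumericSubClass, and the Int fallback covers unordered pairs such as Short and Char:

    // strings and widths are illustrative stand-ins for the compiler's numeric value types
    object NumericLubSketch {
      val width = Map("Byte" -> 1, "Short" -> 2, "Char" -> 2, "Int" -> 3,
                      "Long" -> 4, "Float" -> 5, "Double" -> 6)
      def isNumericSub(t1: String, t2: String): Boolean =
        t1 == t2 || width(t1) < width(t2)   // stand-in for isNumericSubClass
      // callers guarantee a non-empty list, as weakLub does above
      def numericLub(ts: List[String]): String =
        ts reduceLeft ((t1, t2) =>
          if (isNumericSub(t1, t2)) t2
          else if (isNumericSub(t2, t1)) t1
          else "Int")                       // unordered pair, e.g. Short vs Char
    }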
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileConstants.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileConstants.scala
index b685fe1c2f..a139f605b0 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileConstants.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileConstants.scala
@@ -77,7 +77,7 @@ object ClassfileConstants {
final val CONSTANT_INTFMETHODREF = 11
final val CONSTANT_NAMEANDTYPE = 12
- // tags desribing the type of a literal in attribute values
+ // tags describing the type of a literal in attribute values
final val BYTE_TAG = 'B'
final val CHAR_TAG = 'C'
final val DOUBLE_TAG = 'D'
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 5c4679625b..3a8d93a7a7 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -14,8 +14,8 @@ import java.lang.Integer.toHexString
import scala.collection.immutable.{Map, ListMap}
import scala.collection.mutable.{ListBuffer, ArrayBuffer}
import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.util.{Position, NoPosition, ClassRep}
import scala.annotation.switch
+import reflect.generic.PickleBuffer
/** This abstract class implements a class file parser.
*
@@ -36,6 +36,7 @@ abstract class ClassfileParser {
protected var staticDefs: Scope = _ // the scope of all static definitions
protected var pool: ConstantPool = _ // the classfile's constant pool
protected var isScala: Boolean = _ // does class file describe a scala class?
+ protected var isScalaAnnot: Boolean = _ // does class file describe a scala class with its pickled info in an annotation?
protected var isScalaRaw: Boolean = _ // this class file is a scala class with no pickled info
protected var hasMeta: Boolean = _ // does class file contain jaco meta attribute?s
protected var busy: Option[Symbol] = None // lock to detect recursive reads
@@ -84,11 +85,11 @@ abstract class ClassfileParser {
*/
this.in = new AbstractFileReader(file)
if (root.isModule) {
- this.clazz = root.linkedClassOfModule
+ this.clazz = root.companionClass
this.staticModule = root
} else {
this.clazz = root
- this.staticModule = root.linkedModuleOfClass
+ this.staticModule = root.companionModule
}
this.isScala = false
this.hasMeta = false
@@ -96,7 +97,8 @@ abstract class ClassfileParser {
parseHeader
this.pool = new ConstantPool
parseClass()
- } catch {
+ }
+ catch {
case e: MissingRequirementError => handleMissing(e)
case e: RuntimeException => handleError(e)
}
@@ -244,7 +246,7 @@ abstract class ClassfileParser {
log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe)
f = if (tpe.isInstanceOf[MethodType]) owner.newMethod(owner.pos, name).setInfo(tpe)
else owner.newValue(owner.pos, name).setInfo(tpe).setFlag(MUTABLE)
- log("created fake member " + f.fullNameString)
+ log("created fake member " + f.fullName)
}
// println("\townerTpe.decls: " + ownerTpe.decls)
// println("Looking for: " + name + ": " + tpe + " inside: " + ownerTpe.typeSymbol + "\n\tand found: " + ownerTpe.members)
@@ -353,6 +355,22 @@ abstract class ClassfileParser {
}
}
+ def getBytes(index: Int): Array[Byte] = {
+ if (index <= 0 || len <= index) errorBadIndex(index)
+ var value = values(index).asInstanceOf[Array[Byte]]
+ if (value eq null) {
+ val start = starts(index)
+ if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
+ val len = in.getChar(start + 1)
+ val bytes = new Array[Byte](len)
+ Array.copy(in.buf, start + 3, bytes, 0, len)
+ val decodedLength = reflect.generic.ByteCodecs.decode(bytes)
+ value = bytes.take(decodedLength)
+ values(index) = value
+ }
+ value
+ }
+
/** Throws an exception signaling a bad constant index. */
private def errorBadIndex(index: Int) =
throw new RuntimeException("bad constant pool index: " + index + " at pos: " + in.bp)
@@ -421,12 +439,17 @@ abstract class ClassfileParser {
in.bp += ifaces * 2 // .. and iface count interfaces
List(definitions.AnyRefClass.tpe) // dummy superclass, will be replaced by pickled information
} else {
- val superType = if (isAnnotation) { in.nextChar; definitions.AnnotationClass.tpe }
- else pool.getSuperClass(in.nextChar).tpe
- val ifaceCount = in.nextChar
- var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(in.nextChar).tpe
- if (isAnnotation) ifaces = definitions.ClassfileAnnotationClass.tpe :: ifaces
- superType :: ifaces
+ try {
+ loaders.parentsLevel += 1
+ val superType = if (isAnnotation) { in.nextChar; definitions.AnnotationClass.tpe }
+ else pool.getSuperClass(in.nextChar).tpe
+ val ifaceCount = in.nextChar
+ var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(in.nextChar).tpe
+ if (isAnnotation) ifaces = definitions.ClassfileAnnotationClass.tpe :: ifaces
+ superType :: ifaces
+ } finally {
+ loaders.parentsLevel -= 1
+ }
}
}
@@ -440,6 +463,7 @@ abstract class ClassfileParser {
// get the class file parser to reuse scopes.
instanceDefs = new Scope
staticDefs = new Scope
+
val classInfo = ClassInfoType(parseParents, instanceDefs, clazz)
val staticInfo = ClassInfoType(List(), staticDefs, statics)
@@ -464,22 +488,32 @@ abstract class ClassfileParser {
// attributes now depend on having infos set already
parseAttributes(clazz, classInfo)
- in.bp = curbp
- val fieldCount = in.nextChar
- for (i <- 0 until fieldCount) parseField()
- sawPrivateConstructor = false
- val methodCount = in.nextChar
- for (i <- 0 until methodCount) parseMethod()
- if (!sawPrivateConstructor &&
- (instanceDefs.lookup(nme.CONSTRUCTOR) == NoSymbol &&
- (sflags & INTERFACE) == 0L))
- {
- //Console.println("adding constructor to " + clazz);//DEBUG
- instanceDefs.enter(
- clazz.newConstructor(NoPosition)
- .setFlag(clazz.flags & ConstrFlags)
- .setInfo(MethodType(List(), clazz.tpe)))
+ loaders.pendingLoadActions = { () =>
+ in.bp = curbp
+ val fieldCount = in.nextChar
+ for (i <- 0 until fieldCount) parseField()
+ sawPrivateConstructor = false
+ val methodCount = in.nextChar
+ for (i <- 0 until methodCount) parseMethod()
+ if (!sawPrivateConstructor &&
+ (instanceDefs.lookup(nme.CONSTRUCTOR) == NoSymbol &&
+ (sflags & INTERFACE) == 0L))
+ {
+ //Console.println("adding constructor to " + clazz);//DEBUG
+ instanceDefs.enter(
+ clazz.newConstructor(NoPosition)
+ .setFlag(clazz.flags & ConstrFlags)
+ .setInfo(MethodType(List(), clazz.tpe)))
+ }
+ ()
+ } :: loaders.pendingLoadActions
+ if (loaders.parentsLevel == 0) {
+ while (!loaders.pendingLoadActions.isEmpty) {
+ val item = loaders.pendingLoadActions.head
+ loaders.pendingLoadActions = loaders.pendingLoadActions.tail
+ item()
}
+ }
} else
parseAttributes(clazz, classInfo)
}
@@ -508,7 +542,7 @@ abstract class ClassfileParser {
val info = pool.getType(in.nextChar)
val sym = getOwner(jflags)
.newValue(NoPosition, name).setFlag(sflags)
- sym.setInfo(if ((jflags & JAVA_ACC_ENUM) == 0) info else mkConstantType(Constant(sym)))
+ sym.setInfo(if ((jflags & JAVA_ACC_ENUM) == 0) info else ConstantType(Constant(sym)))
setPrivateWithin(sym, jflags)
parseAttributes(sym, info)
getScope(jflags).enter(sym)
@@ -589,6 +623,8 @@ abstract class ClassfileParser {
while (!isDelimiter(sig(index))) { index += 1 }
sig.subName(start, index)
}
+ def existentialType(tparams: List[Symbol], tp: Type): Type =
+ if (tparams.isEmpty) tp else ExistentialType(tparams, tp)
def sig2type(tparams: Map[Name,Symbol], skiptvs: Boolean): Type = {
val tag = sig(index); index += 1
tag match {
@@ -620,12 +656,12 @@ abstract class ClassfileParser {
case variance @ ('+' | '-' | '*') =>
index += 1
val bounds = variance match {
- case '+' => mkTypeBounds(definitions.NothingClass.tpe,
- sig2type(tparams, skiptvs))
- case '-' => mkTypeBounds(sig2type(tparams, skiptvs),
- definitions.AnyClass.tpe)
- case '*' => mkTypeBounds(definitions.NothingClass.tpe,
- definitions.AnyClass.tpe)
+ case '+' => TypeBounds(definitions.NothingClass.tpe,
+ sig2type(tparams, skiptvs))
+ case '-' => TypeBounds(sig2type(tparams, skiptvs),
+ definitions.AnyClass.tpe)
+ case '*' => TypeBounds(definitions.NothingClass.tpe,
+ definitions.AnyClass.tpe)
}
val newtparam = sym.newExistential(sym.pos, "?"+i) setInfo bounds
existentials += newtparam
@@ -637,14 +673,14 @@ abstract class ClassfileParser {
}
accept('>')
assert(xs.length > 0)
- existentialAbstraction(existentials.toList, TypeRef(pre, classSym, xs.toList))
+ existentialType(existentials.toList, TypeRef(pre, classSym, xs.toList))
} else if (classSym.isMonomorphicType) {
tp
} else {
// raw type - existentially quantify all type parameters
val eparams = typeParamsToExistentials(classSym, classSym.unsafeTypeParams)
val t = TypeRef(pre, classSym, eparams.map(_.tpe))
- val res = existentialAbstraction(eparams, t)
+ val res = existentialType(eparams, t)
if (settings.debug.value && settings.verbose.value) println("raw type " + classSym + " -> " + res)
res
}
@@ -668,7 +704,7 @@ abstract class ClassfileParser {
while ('0' <= sig(index) && sig(index) <= '9') index += 1
var elemtp = sig2type(tparams, skiptvs)
// make unbounded Array[T] where T is a type variable into Array[T with Object]
- // (this is necessary because such arrays have a representation which is incompatibe
+ // (this is necessary because such arrays have a representation which is incompatible
// with arrays of primitive types.
if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< definitions.ObjectClass.tpe))
elemtp = intersectionType(List(elemtp, definitions.ObjectClass.tpe))
@@ -702,7 +738,7 @@ abstract class ClassfileParser {
if (sig(index) != ':') // guard against empty class bound
ts += objToAny(sig2type(tparams, skiptvs))
}
- mkTypeBounds(definitions.NothingClass.tpe, intersectionType(ts.toList, sym))
+ TypeBounds(definitions.NothingClass.tpe, intersectionType(ts.toList, sym))
}
var tparams = classTParams
@@ -781,12 +817,15 @@ abstract class ClassfileParser {
case nme.ConstantValueATTR =>
val c = pool.getConstant(in.nextChar)
val c1 = convertTo(c, symtype)
- if (c1 ne null) sym.setInfo(mkConstantType(c1))
+ if (c1 ne null) sym.setInfo(ConstantType(c1))
else println("failure to convert " + c + " to " + symtype); //debug
case nme.ScalaSignatureATTR =>
- unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.toString())
+ if (!isScalaAnnot) {
+ if (settings.debug.value)
+ global.inform("warning: symbol " + sym.fullName + " has pickled signature in attribute")
+ unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.toString())
+ }
in.skip(attrLen)
- this.isScala = true
case nme.ScalaATTR =>
isScalaRaw = true
case nme.JacoMetaATTR =>
@@ -797,13 +836,21 @@ abstract class ClassfileParser {
case nme.AnnotationDefaultATTR =>
sym.addAnnotation(AnnotationInfo(definitions.AnnotationDefaultAttr.tpe, List(), List()))
in.skip(attrLen)
- // Java annotatinos on classes / methods / fields with RetentionPolicy.RUNTIME
+ // Java annotations on classes / methods / fields with RetentionPolicy.RUNTIME
case nme.RuntimeAnnotationATTR =>
- if (!isScala) {
- // no need to read annotations if isScala, ClassfileAnnotations are pickled
- parseAnnotations(attrLen)
+ if (isScalaAnnot || !isScala) {
+ val scalaSigAnnot = parseAnnotations(attrLen)
+ if (isScalaAnnot)
+ scalaSigAnnot match {
+ case Some(san: AnnotationInfo) =>
+ val bytes =
+ san.assocs.find({ _._1 == nme.bytes }).get._2.asInstanceOf[ScalaSigBytes].bytes
+ unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.toString())
+ case None =>
+ throw new RuntimeException("Scala class file does not contain Scala annotation")
+ }
if (settings.debug.value)
- global.inform("" + sym + "; annotations = " + sym.annotations)
+ global.inform("" + sym + "; annotations = " + sym.rawAnnotations)
} else
in.skip(attrLen)
@@ -820,7 +867,7 @@ abstract class ClassfileParser {
val srcfileLeaf = pool.getName(in.nextChar).toString.trim
val srcpath = sym.enclosingPackage match {
case NoSymbol => srcfileLeaf
- case pkg => pkg.fullNameString(File.separatorChar)+File.separator+srcfileLeaf
+ case pkg => pkg.fullName(File.separatorChar)+File.separator+srcfileLeaf
}
srcfile0 = settings.outputDirs.srcFilesFor(in.file, srcpath).find(_.exists)
case _ =>
@@ -842,7 +889,7 @@ abstract class ClassfileParser {
case ENUM_TAG =>
val t = pool.getType(index)
val n = pool.getName(in.nextChar)
- val s = t.typeSymbol.linkedModuleOfClass.info.decls.lookup(n)
+ val s = t.typeSymbol.companionModule.info.decls.lookup(n)
assert(s != NoSymbol, t)
Some(LiteralAnnotArg(Constant(s)))
case ARRAY_TAG =>
@@ -860,6 +907,12 @@ abstract class ClassfileParser {
}
}
+ def parseScalaSigBytes: Option[ScalaSigBytes] = {
+ val tag = in.nextByte.toChar
+ assert(tag == STRING_TAG)
+ Some(ScalaSigBytes(pool.getBytes(in.nextChar)))
+ }
+
/** Parse and return a single annotation. If it is malformed,
* return None.
*/
@@ -870,10 +923,19 @@ abstract class ClassfileParser {
var hasError = false
for (i <- 0 until nargs) {
val name = pool.getName(in.nextChar)
- parseAnnotArg match {
- case Some(c) => nvpairs += ((name, c))
- case None => hasError = true
- }
+ // The "bytes: String" argument of the ScalaSignature attribute is parsed specially so that it is
+ // available as an array of bytes (the pickled Scala signature) instead of as a string. The pickled signature
+ // is encoded as a string because of limitations in the Java class file format.
+ if ((attrType == definitions.ScalaSignatureAnnotation.tpe) && (name == nme.bytes))
+ parseScalaSigBytes match {
+ case Some(c) => nvpairs += ((name, c))
+ case None => hasError = true
+ }
+ else
+ parseAnnotArg match {
+ case Some(c) => nvpairs += ((name, c))
+ case None => hasError = true
+ }
}
if (hasError) None
else Some(AnnotationInfo(attrType, List(), nvpairs.toList))
@@ -900,17 +962,20 @@ abstract class ClassfileParser {
}
}
- /** Parse a sequence of annotations and attach them to the
- * current symbol sym.
- */
- def parseAnnotations(len: Int) {
+ /** Parse a sequence of annotations and attach them to the
+ * current symbol sym, except for the ScalaSignature annotation, which it returns if available. */
+ def parseAnnotations(len: Int): Option[AnnotationInfo] = {
val nAttr = in.nextChar
+ var scalaSigAnnot: Option[AnnotationInfo] = None
for (n <- 0 until nAttr)
parseAnnotation(in.nextChar) match {
+ case Some(scalaSig) if (scalaSig.atp == definitions.ScalaSignatureAnnotation.tpe) =>
+ scalaSigAnnot = Some(scalaSig)
case Some(annot) =>
sym.addAnnotation(annot)
case None =>
}
+ scalaSigAnnot
}
// begin parseAttributes
@@ -950,12 +1015,11 @@ abstract class ClassfileParser {
}
}
- for (entry <- innerClasses.valuesIterator) {
+ for (entry <- innerClasses.values) {
// create a new class member for immediate inner classes
if (entry.outerName == externalName) {
- val file = global.classPath.findClass(entry.externalName.toString) match {
- case Some(ClassRep(Some(binary: AbstractFile), _)) => binary
- case _ => throw new AssertionError(entry.externalName)
+ val file = global.classPath.findSourceFile(entry.externalName.toString) getOrElse {
+ throw new AssertionError(entry.externalName)
}
enterClassAndModule(entry, new global.loaders.ClassfileLoader(file), entry.jflags)
}
@@ -984,6 +1048,10 @@ abstract class ClassfileParser {
in.skip(attrLen)
case nme.ScalaSignatureATTR =>
isScala = true
+ val pbuf = new PickleBuffer(in.buf, in.bp, in.bp + attrLen)
+ pbuf.readNat; pbuf.readNat;
+ if (pbuf.readNat == 0) // a scala signature attribute with no entries means that the actual scala signature
+ isScalaAnnot = true // is in a ScalaSignature annotation.
in.skip(attrLen)
case nme.ScalaATTR =>
isScalaRaw = true
@@ -1042,7 +1110,7 @@ abstract class ClassfileParser {
def getMember(sym: Symbol, name: Name): Symbol =
if (static)
if (sym == clazz) staticDefs.lookup(name)
- else sym.linkedModuleOfClass.info.member(name)
+ else sym.companionModule.info.member(name)
else
if (sym == clazz) instanceDefs.lookup(name)
else sym.info.member(name)
@@ -1055,18 +1123,18 @@ abstract class ClassfileParser {
val sym = classSymbol(outerName)
val s =
// if loading during initialization of `definitions' typerPhase is not yet set.
- // in that case we simply load the mmeber at the current phase
+ // in that case we simply load the member at the current phase
if (currentRun.typerPhase != null)
atPhase(currentRun.typerPhase)(getMember(sym, innerName.toTypeName))
else
getMember(sym, innerName.toTypeName)
- assert(s ne NoSymbol, sym + "." + innerName + " linkedModule: " + sym.linkedModuleOfClass + sym.linkedModuleOfClass.info.members)
+ assert(s ne NoSymbol, sym + "." + innerName + " linkedModule: " + sym.companionModule + sym.companionModule.info.members)
s
case None =>
val cls = classNameToSymbol(externalName)
cls
- //if (static) cls.linkedClassOfModule else cls
+ //if (static) cls.companionClass else cls
}
}
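In the parser changes above, a ScalaSig attribute with no entries now means the real pickled signature is carried by a runtime-visible ScalaSignature annotation: its bytes element is read as a string constant, decoded in place by reflect.generic.ByteCodecs, and only the decoded prefix is unpickled (see getBytes). A small sketch of that decode step; decodePickle and its parameter are invented names, while the decode call and its return value are exactly what the patch relies on:

    // assumes only the reflect.generic.ByteCodecs used by the patch: decode rewrites the
    // array in place and returns the length of the decoded prefix
    import scala.reflect.generic.ByteCodecs

    def decodePickle(encoded: Array[Byte]): Array[Byte] = {
      val buf = encoded.clone()                  // keep the caller's array intact
      val decodedLength = ByteCodecs.decode(buf) // in-place decode of the string-safe encoding
      buf.take(decodedLength)                    // the pickled Scala signature bytes
    }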
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index c75c9982de..92dfb3749a 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -14,7 +14,6 @@ import scala.collection.mutable._
import scala.tools.nsc._
import scala.tools.nsc.backend.icode._
import scala.tools.nsc.io._
-import scala.tools.nsc.util.{Position, NoPosition, ClassRep}
import ClassfileConstants._
import Flags._
@@ -49,19 +48,11 @@ abstract class ICodeReader extends ClassfileParser {
isScalaModule = cls.isModule && !cls.hasFlag(JAVA)
log("Reading class: " + cls + " isScalaModule?: " + isScalaModule)
- val name = cls.fullNameString('.') + (if (sym.hasFlag(MODULE)) "$" else "")
- classPath.findClass(name) match {
- case Some(ClassRep(bin, _)) =>
- assert(bin.isDefined, "No classfile for " + cls)
- classFile = bin.get.asInstanceOf[AbstractFile]
-// if (isScalaModule)
-// sym = cls.linkedClassOfModule
-
-// for (s <- cls.info.members)
-// Console.println("" + s + ": " + s.tpe)
- parse(classFile, sym)
- case _ =>
- log("Could not find: " + cls)
+ val name = cls.fullName('.') + (if (sym.hasFlag(MODULE)) "$" else "")
+
+ classPath.findSourceFile(name) match {
+ case Some(classFile) => parse(classFile, sym)
+ case _ => log("Could not find: " + cls)
}
(staticCode, instanceCode)
@@ -144,24 +135,22 @@ abstract class ICodeReader extends ClassfileParser {
res
}
- /** Checks if tp1 is the same type as tp2, modulo implict methods.
- * We don't care about the distinction between implcit and explicit
+ /** Checks if tp1 is the same type as tp2, modulo implicit methods.
+ * We don't care about the distinction between implicit and explicit
* methods as this point, and we can't get back the information from
* bytecode anyway.
*/
private def sameType(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
- case (MethodType(args1, resTpe1), MethodType(args2, resTpe2)) =>
- if (tp1.isInstanceOf[ImplicitMethodType] || tp2.isInstanceOf[ImplicitMethodType]) {
- MethodType(args1, resTpe1) =:= MethodType(args2, resTpe2)
- } else
- tp1 =:= tp2
- case _ => tp1 =:= tp2
+ case (mt1 @ MethodType(args1, resTpe1), mt2 @ MethodType(args2, resTpe2)) if mt1.isImplicit || mt2.isImplicit =>
+ MethodType(args1, resTpe1) =:= MethodType(args2, resTpe2)
+ case _ =>
+ tp1 =:= tp2
}
override def parseMethod() {
val (jflags, sym) = parseMember(false)
if (sym != NoSymbol) {
- log("Parsing method " + sym.fullNameString + ": " + sym.tpe);
+ log("Parsing method " + sym.fullName + ": " + sym.tpe);
this.method = new IMethod(sym);
this.method.returnType = toTypeKind(sym.tpe.resultType)
getCode(jflags).addMethod(this.method)
@@ -638,7 +627,12 @@ abstract class ICodeReader extends ClassfileParser {
if (code.containsNEW) code.resolveNEWs
}
- /** TODO: move in Definitions and remove obsolete isBox/isUnbox found there. */
+ /** Note: these methods are different from the methods of the same name found
+ * in Definitions. These test whether a symbol represents one of the boxTo/unboxTo
+ * methods found in BoxesRunTime. The others test whether a symbol represents a
+ * synthetic method from one of the fake companion classes of the primitive types,
+ * such as Int.box(5).
+ */
def isBox(m: Symbol): Boolean =
(m.owner == definitions.BoxesRunTimeClass.moduleClass
&& m.name.startsWith("boxTo"))
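The new comment on isBox/isUnbox distinguishes calls to the boxTo*/unboxTo* methods of scala.runtime.BoxesRunTime, which is how boxing appears in bytecode, from the synthetic box methods on the fake primitive companions such as Int.box(5). A small illustration of the owner-plus-prefix test, with an invented Sym case class standing in for compiler symbols:

    // Sym is an invented stand-in; the class and method names are the real
    // scala.runtime.BoxesRunTime ones matched by the owner + prefix test above
    case class Sym(owner: String, name: String)

    def isBoxCall(m: Sym): Boolean   = m.owner == "scala.runtime.BoxesRunTime" && m.name.startsWith("boxTo")
    def isUnboxCall(m: Sym): Boolean = m.owner == "scala.runtime.BoxesRunTime" && m.name.startsWith("unboxTo")

    // boxing an Int in bytecode shows up as BoxesRunTime.boxToInteger(5), so:
    val boxed = isBoxCall(Sym("scala.runtime.BoxesRunTime", "boxToInteger"))   // true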
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala
index bd20c2dabf..0ae2d5b015 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala
@@ -11,7 +11,6 @@ package classfile
import java.util.{StringTokenizer, NoSuchElementException}
import scala.collection.mutable.ListBuffer
-import scala.tools.nsc.util.{Position,NoPosition}
abstract class MetaParser{
@@ -78,7 +77,7 @@ abstract class MetaParser{
val hi =
if (token == "<") { nextToken(); parseType() }
else definitions.AnyClass.tpe
- sym.setInfo(mkTypeBounds(lo, hi))
+ sym.setInfo(TypeBounds(lo, hi))
locals enter sym;
sym
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index e080df9210..68d314bfe9 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -9,11 +9,11 @@ package symtab
package classfile
import java.lang.{Float, Double}
-import scala.tools.nsc.util.{Position, NoPosition, ShowPickled}
-import scala.collection.mutable.Set
-import Flags._
+import util.{ Position, NoPosition, ShowPickled }
+import collection.mutable.Set
+import reflect.generic.{ PickleBuffer, PickleFormat }
import PickleFormat._
-
+import Flags._
/**
* Serialize a top-level module and/or class.
@@ -51,7 +51,7 @@ abstract class Pickler extends SubComponent {
val sym = tree.symbol
val pickle = new Pickle(sym, sym.name.toTermName, sym.owner)
add(sym, pickle)
- add(sym.linkedSym, pickle)
+ add(sym.companionSymbol, pickle)
pickle.finish
case _ =>
}
@@ -68,7 +68,7 @@ abstract class Pickler extends SubComponent {
private val index = new LinkedHashMap[AnyRef, Int]
// collect higher-order type params
- private var locals: Set[Symbol] = Set()
+ //private var locals: Set[Symbol] = Set()
// private var boundSyms: List[Symbol] = Nil
@@ -92,7 +92,7 @@ abstract class Pickler extends SubComponent {
(isRootSym(sym) ||
sym.isRefinementClass ||
sym.isAbstractType && sym.hasFlag(EXISTENTIAL) || // existential param
- (locals contains sym) || // higher-order type param
+ (sym hasFlag PARAM) ||
isLocal(sym.owner))
private def staticAnnotations(annots: List[AnnotationInfo]) =
@@ -136,7 +136,6 @@ abstract class Pickler extends SubComponent {
if (sym.thisSym.tpeHK != sym.tpeHK)
putType(sym.typeOfThis);
putSymbol(sym.alias)
- putSymbol(sym.defaultGetter)
if (!sym.children.isEmpty) {
val (locals, globals) = sym.children.toList.partition(_.isLocalClass)
val children =
@@ -146,7 +145,7 @@ abstract class Pickler extends SubComponent {
localChildDummy.setInfo(ClassInfoType(List(sym.tpe), EmptyScope, localChildDummy))
localChildDummy :: globals
}
- putChildren(sym, children.sortWith((x, y) => x isLess y))
+ putChildren(sym, children sortBy (_.sealedSortName))
}
for (annot <- staticAnnotations(sym.annotations.reverse))
putAnnotation(sym, annot)
@@ -160,7 +159,7 @@ abstract class Pickler extends SubComponent {
private def putSymbols(syms: List[Symbol]) =
syms foreach putSymbol
- /** Store type and everythig it refers to in map <code>index</code>.
+ /** Store type and everything it refers to in map <code>index</code>.
*
* @param tp ...
*/
@@ -194,9 +193,11 @@ abstract class Pickler extends SubComponent {
case MethodType(params, restpe) =>
putType(restpe); putSymbols(params)
case PolyType(tparams, restpe) =>
+ /** no longer needed since all params are now local
tparams foreach { tparam =>
if (!isLocal(tparam)) locals += tparam // similar to existential types, these tparams are local
}
+ */
putType(restpe); putSymbols(tparams)
case ExistentialType(tparams, restpe) =>
// val savedBoundSyms = boundSyms // boundSyms are known to be local based on the EXISTENTIAL flag (see isLocal)
@@ -272,11 +273,11 @@ abstract class Pickler extends SubComponent {
putEntry(from)
putEntry(to)
}
-
- case DocDef(comment, definition) =>
+/*
+ case DocDef(comment, definition) => should not be needed
putConstant(Constant(comment))
putTree(definition)
-
+*/
case Template(parents, self, body) =>
writeNat(parents.length)
putTrees(parents)
@@ -561,8 +562,6 @@ abstract class Pickler extends SubComponent {
writeSymInfo(sym)
if (sym.isAbstractType) TYPEsym else ALIASsym
case sym: TermSymbol =>
- if (!sym.isModule && sym.defaultGetter != NoSymbol)
- writeRef(sym.defaultGetter)
writeSymInfo(sym)
if (sym.alias != NoSymbol) writeRef(sym.alias)
if (sym.isModule) MODULEsym else VALsym
@@ -586,9 +585,9 @@ abstract class Pickler extends SubComponent {
writeRef(tp.typeSymbol); writeRefs(parents); REFINEDtpe
case ClassInfoType(parents, decls, clazz) =>
writeRef(clazz); writeRefs(parents); CLASSINFOtpe
- case MethodType(formals, restpe) =>
+ case mt @ MethodType(formals, restpe) =>
writeRef(restpe); writeRefs(formals)
- if (entry.isInstanceOf[ImplicitMethodType]) IMPLICITMETHODtpe
+ if (mt.isImplicit) IMPLICITMETHODtpe
else METHODtpe
case PolyType(tparams, restpe) =>
writeRef(restpe); writeRefs(tparams); POLYtpe
@@ -1017,7 +1016,6 @@ abstract class Pickler extends SubComponent {
case sym: TermSymbol =>
print(if (sym.isModule) "MODULEsym " else "VALsym ")
printSymInfo(sym)
- if (!sym.isModule) printRef(sym.defaultGetter)
if (sym.alias != NoSymbol) printRef(sym.alias)
case NoType =>
print("NOtpe")
@@ -1037,13 +1035,14 @@ abstract class Pickler extends SubComponent {
print("REFINEDtpe "); printRef(tp.typeSymbol); printRefs(parents);
case ClassInfoType(parents, decls, clazz) =>
print("CLASSINFOtpe "); printRef(clazz); printRefs(parents);
- case MethodType(formals, restpe) =>
- print(if (entry.isInstanceOf[ImplicitMethodType]) "IMPLICITMETHODtpe " else "METHODtpe ");
+ case mt @ MethodType(formals, restpe) =>
+ print(if (mt.isImplicit) "IMPLICITMETHODtpe " else "METHODtpe ");
printRef(restpe); printRefs(formals)
case PolyType(tparams, restpe) =>
print("POLYtpe "); printRef(restpe); printRefs(tparams);
case ExistentialType(tparams, restpe) =>
print("EXISTENTIALtpe "); printRef(restpe); printRefs(tparams);
+ print("||| "+entry)
case DeBruijnIndex(l, i) =>
print("DEBRUIJNINDEXtpe "); print(l+" "+i)
case c @ Constant(_) =>
@@ -1099,7 +1098,7 @@ abstract class Pickler extends SubComponent {
println("Pickled info for "+rootName+" V"+MajorVersion+"."+MinorVersion)
}
for (i <- 0 until ep) {
- if (showSig) {
+ if (showSig/* || rootName.toString == "StaticCompletion"*/) {
print((i formatted "%3d: ")+(writeIndex formatted "%5d: "))
printEntry(entries(i))
}
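One of the smaller Pickler changes above replaces the pairwise sortWith on isLess with a keyed sortBy (_.sealedSortName) when pickling a sealed class's children. A toy rendering of that reshaping, with plain strings standing in for symbols:

    // strings stand in for symbols; the point is only the sortWith-to-sortBy reshaping
    val children = List("Var", "App", "Lam")
    val oldStyle = children.sortWith((x, y) => x < y)   // pairwise comparison, as with isLess
    val newStyle = children.sortBy(identity)            // explicit sort key, as with sealedSortName
    // both yield List("App", "Lam", "Var") here; the keyed form states what the order depends on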
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
index 05ffba925a..4022258746 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
@@ -11,769 +11,47 @@ package classfile
import java.io.IOException
import java.lang.{Float, Double}
-import scala.tools.nsc.util.{Position, NoPosition}
-
import Flags._
-import PickleFormat._
+import scala.reflect.generic.PickleFormat._
import collection.mutable.{HashMap, ListBuffer}
import annotation.switch
-/** This abstract class implements ..
- *
- * @author Martin Odersky
+/** @author Martin Odersky
* @version 1.0
*/
-abstract class UnPickler {
+abstract class UnPickler extends reflect.generic.UnPickler {
val global: Global
import global._
- /** Unpickle symbol table information descending from a class and/or module root
- * from an array of bytes.
- * @param bytes bytearray from which we unpickle
- * @param offset offset from which unpickling starts
- * @param classroot the top-level class which is unpickled, or NoSymbol if unapplicable
- * @param moduleroot the top-level module which is unpickled, or NoSymbol if unapplicable
- * @param filename filename associated with bytearray, only used for error messages
- */
- def unpickle(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) {
- try {
- val p = if (currentRun.isDefined &&
- currentRun.picklerPhase != NoPhase &&
- phase.id > currentRun.picklerPhase.id) currentRun.picklerPhase
- else phase
- atPhase(p) {
- new UnPickle(bytes, offset, classRoot, moduleRoot, filename)
- }
- } catch {
- case ex: IOException =>
- throw ex
- case ex: Throwable =>
- /*if (settings.debug.value)*/ ex.printStackTrace()
- throw new RuntimeException("error reading Scala signature of "+filename+": "+ex.getMessage())
- }
- }
-
- private class UnPickle(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) extends PickleBuffer(bytes, offset, -1) {
- if (settings.debug.value) global.log("unpickle " + classRoot + " and " + moduleRoot)
- checkVersion(filename)
-
- /** A map from entry numbers to array offsets */
- private val index = createIndex
-
- /** A map from entry numbers to symbols, types, or annotations */
- private val entries = new Array[AnyRef](index.length)
-
- /** A map from symbols to their associated `decls' scopes */
- private val symScopes = new HashMap[Symbol, Scope]
-
- for (i <- 0 until index.length) {
- if (isSymbolEntry(i))
- at(i, readSymbol)
- else if (isSymbolAnnotationEntry(i))
- at(i, {() => readSymbolAnnotation(); null})
- else if (isChildrenEntry(i))
- at(i, {() => readChildren(); null})
- }
-
- if (settings.debug.value) global.log("unpickled " + classRoot + ":" + classRoot.rawInfo + ", " + moduleRoot + ":" + moduleRoot.rawInfo);//debug
-
- private def checkVersion(filename: String) {
- val major = readNat()
- val minor = readNat()
- if (major != MajorVersion || minor > MinorVersion)
- throw new IOException("Scala signature " + classRoot.name +
- " has wrong version\n expected: " +
- MajorVersion + "." + MinorVersion +
- "\n found: " + major + "." + minor +
- " in "+filename)
- }
-
- /** The `decls' scope associated with given symbol */
- private def symScope(sym: Symbol) = symScopes.get(sym) match {
- case None => val s = new Scope; symScopes(sym) = s; s
- case Some(s) => s
- }
-
- /** Does entry represent an (internal) symbol */
- private def isSymbolEntry(i: Int): Boolean = {
- val tag = bytes(index(i)).toInt
- (firstSymTag <= tag && tag <= lastSymTag &&
- (tag != CLASSsym || !isRefinementSymbolEntry(i)))
- }
-
- /** Does entry represent an (internal or external) symbol */
- private def isSymbolRef(i: Int): Boolean = {
- val tag = bytes(index(i))
- (firstSymTag <= tag && tag <= lastExtSymTag)
- }
-
- /** Does entry represent a name? */
- private def isNameEntry(i: Int): Boolean = {
- val tag = bytes(index(i)).toInt
- tag == TERMname || tag == TYPEname
- }
-
- /** Does entry represent a symbol annotation? */
- private def isSymbolAnnotationEntry(i: Int): Boolean = {
- val tag = bytes(index(i)).toInt
- tag == SYMANNOT
- }
-
- /** Does the entry represent children of a symbol? */
- private def isChildrenEntry(i: Int): Boolean = {
- val tag = bytes(index(i)).toInt
- tag == CHILDREN
- }
-
- /** Does entry represent a refinement symbol?
- * pre: Entry is a class symbol
- */
- private def isRefinementSymbolEntry(i: Int): Boolean = {
- val savedIndex = readIndex
- readIndex = index(i)
- val tag = readByte().toInt
- assert(tag == CLASSsym)
-
- readNat(); // read length
- val result = readNameRef() == nme.REFINE_CLASS_NAME.toTypeName
- readIndex = savedIndex
- result
- }
-
- /** If entry at <code>i</code> is undefined, define it by performing
- * operation <code>op</code> with <code>readIndex at start of i'th
- * entry. Restore <code>readIndex</code> afterwards.
- */
- private def at[T <: AnyRef](i: Int, op: () => T): T = {
- var r = entries(i)
- if (r eq null) {
- val savedIndex = readIndex
- readIndex = index(i)
- r = op()
- assert(entries(i) eq null, entries(i))
- entries(i) = r
- readIndex = savedIndex
- }
- r.asInstanceOf[T]
- }
-
- /** Read a name */
- private def readName(): Name = {
- val tag = readByte()
- val len = readNat()
- tag match {
- case TERMname => newTermName(bytes, readIndex, len)
- case TYPEname => newTypeName(bytes, readIndex, len)
- case _ => errorBadSignature("bad name tag: " + tag)
- }
- }
+ def scan(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) =
+ new CompileScan(bytes, offset, classRoot, moduleRoot, filename).run()
- /** Read a symbol */
- private def readSymbol(): Symbol = {
- val tag = readByte()
- val end = readNat() + readIndex
- var sym: Symbol = NoSymbol
- tag match {
- case EXTref | EXTMODCLASSref =>
- val name = readNameRef()
- val owner = if (readIndex == end) definitions.RootClass else readSymbolRef()
- def fromName(name: Name) =
- if (name.toTermName == nme.ROOT) definitions.RootClass
- else if (name == nme.ROOTPKG) definitions.RootPackage
- else if (tag == EXTref) owner.info.decl(name)
- else owner.info.decl(name).moduleClass
- sym = fromName(name)
- // If sym not found try with expanded name.
- // This can happen if references to private symbols are
- // read from outside; for instance when checking the children of a class
- // (see t1722)
- if (sym == NoSymbol) {
- sym = fromName(owner.expandedName(name))
- }
+ class CompileScan(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String)
+ extends Scan(bytes, offset, classRoot, moduleRoot, filename) {
- // If the owner is overloaded (i.e. a method), it's not possible to select the
- // right member => return NoSymbol. This can only happen when unpickling a tree.
- // the "case Apply" in readTree() takes care of selecting the correct alternative
- // after parsing the arguments.
- if (sym == NoSymbol && !owner.hasFlag(OVERLOADED)) {
- errorMissingRequirement(
- "reference " + (if (name.isTypeName) "type " else "value ") +
- name.decode + " of " + owner.tpe.widen + " refers to nonexisting symbol.")
- }
- case NONEsym =>
- sym = NoSymbol
- case _ => // symbols that were pickled with Pickler.writeSymInfo
- var defaultGetter: Symbol = NoSymbol
- var nameref = readNat()
- if (tag == VALsym && isSymbolRef(nameref)) {
- defaultGetter = at(nameref, readSymbol)
- nameref = readNat()
- }
- val name = at(nameref, readName)
- val owner = readSymbolRef()
- val flags = pickledToRawFlags(readLongNat())
- var privateWithin: Symbol = NoSymbol
- var inforef = readNat()
- if (isSymbolRef(inforef)) {
- privateWithin = at(inforef, readSymbol)
- inforef = readNat()
- }
- tag match {
- case TYPEsym =>
- sym = owner.newAbstractType(NoPosition, name)
- case ALIASsym =>
- sym = owner.newAliasType(NoPosition, name)
- case CLASSsym =>
- sym =
- if (name == classRoot.name && owner == classRoot.owner)
- (if ((flags & MODULE) != 0L) moduleRoot.moduleClass
- else classRoot)
- else
- if ((flags & MODULE) != 0L) owner.newModuleClass(NoPosition, name)
- else owner.newClass(NoPosition, name)
- if (readIndex != end) sym.typeOfThis = new LazyTypeRef(readNat())
- case MODULEsym =>
- val clazz = at(inforef, readType).typeSymbol
- sym =
- if (name == moduleRoot.name && owner == moduleRoot.owner) moduleRoot
- else {
- assert(clazz.isInstanceOf[ModuleClassSymbol], clazz)
- val mclazz = clazz.asInstanceOf[ModuleClassSymbol]
- val m = owner.newModule(NoPosition, name, mclazz)
- mclazz.setSourceModule(m)
- m
- }
- case VALsym =>
- sym = if (name == moduleRoot.name && owner == moduleRoot.owner) moduleRoot.resetFlag(MODULE)
- else owner.newValue(NoPosition, name)
- sym.defaultGetter = defaultGetter
- case _ =>
- errorBadSignature("bad symbol tag: " + tag)
- }
- sym.setFlag(flags.toLong & PickledFlags)
- sym.privateWithin = privateWithin
- if (readIndex != end) assert(sym hasFlag (SUPERACCESSOR | PARAMACCESSOR), sym)
- if (sym hasFlag SUPERACCESSOR) assert(readIndex != end)
- sym.setInfo(
- if (readIndex != end) new LazyTypeRefAndAlias(inforef, readNat())
- else new LazyTypeRef(inforef))
- if (sym.owner.isClass && sym != classRoot && sym != moduleRoot &&
- !sym.isModuleClass && !sym.isRefinementClass && !sym.isTypeParameter && !sym.isExistential)
- symScope(sym.owner) enter sym
- }
- sym
- }
+ protected override def debug = settings.debug.value
- /** Read a type */
- private def readType(): Type = {
- val tag = readByte()
- val end = readNat() + readIndex
+ override def noSuchTypeTag(tag: Int, end: Int): Type = {
tag match {
- case NOtpe =>
- NoType
- case NOPREFIXtpe =>
- NoPrefix
- case THIStpe =>
- mkThisType(readSymbolRef())
- case SINGLEtpe =>
- singleType(readTypeRef(), readSymbolRef())
- case SUPERtpe =>
- val thistpe = readTypeRef()
- val supertpe = readTypeRef()
- SuperType(thistpe, supertpe)
- case CONSTANTtpe =>
- mkConstantType(readConstantRef())
- case TYPEREFtpe =>
- val pre = readTypeRef()
- val sym = readSymbolRef()
- var args = until(end, readTypeRef)
- rawTypeRef(pre, sym, args)
- case TYPEBOUNDStpe =>
- mkTypeBounds(readTypeRef(), readTypeRef())
- case REFINEDtpe =>
- val clazz = readSymbolRef()
-/*
- val ps = until(end, readTypeRef)
- val dcls = symScope(clazz)
- new RefinedType(ps, dcls) { override def symbol = clazz }
-*/
- new RefinedType(until(end, readTypeRef), symScope(clazz)) {
- override def typeSymbol = clazz
- }
- case CLASSINFOtpe =>
- val clazz = readSymbolRef()
- ClassInfoType(until(end, readTypeRef), symScope(clazz), clazz)
- case METHODtpe =>
- val restpe = readTypeRef()
- val params = until(end, readSymbolRef)
- // if the method is overloaded, the params cannot be determined (see readSymbol) => return NoType.
- // Only happen for trees, "case Apply" in readTree() takes care of selecting the correct
- // alternative after parsing the arguments.
- if (params.contains(NoSymbol) || restpe == NoType) NoType
- else MethodType(params, restpe)
- case IMPLICITMETHODtpe =>
- val restpe = readTypeRef()
- val params = until(end, readSymbolRef)
- ImplicitMethodType(params, restpe)
- case POLYtpe =>
- val restpe = readTypeRef()
- val typeParams = until(end, readSymbolRef)
- // see comment above in "case METHODtpe"
- if (typeParams.contains(NoSymbol) || restpe == NoType) NoType
- else PolyType(typeParams, restpe)
- case EXISTENTIALtpe =>
- val restpe = readTypeRef()
- ExistentialType(until(end, readSymbolRef), restpe)
- case ANNOTATEDtpe =>
- var typeRef = readNat()
- val selfsym = if (isSymbolRef(typeRef)) {
- val s = at(typeRef, readSymbol)
- typeRef = readNat()
- s
- } else NoSymbol
- val tp = at(typeRef, readType)
- val annots = until(end, readAnnotationRef)
- if (settings.selfInAnnots.value || (selfsym == NoSymbol))
- AnnotatedType(annots, tp, selfsym)
- else
- tp // drop annotations with a self symbol unless
- // -Yself-in-annots is on
case DEBRUIJNINDEXtpe =>
DeBruijnIndex(readNat(), readNat())
case _ =>
- errorBadSignature("bad type tag: " + tag)
- }
- }
-
- /** Read a constant */
- private def readConstant(): Constant = {
- val tag = readByte().toInt
- val len = readNat()
- (tag: @switch) match {
- case LITERALunit => Constant(())
- case LITERALboolean => Constant(readLong(len) != 0L)
- case LITERALbyte => Constant(readLong(len).toByte)
- case LITERALshort => Constant(readLong(len).toShort)
- case LITERALchar => Constant(readLong(len).toChar)
- case LITERALint => Constant(readLong(len).toInt)
- case LITERALlong => Constant(readLong(len))
- case LITERALfloat => Constant(Float.intBitsToFloat(readLong(len).toInt))
- case LITERALdouble => Constant(Double.longBitsToDouble(readLong(len)))
- case LITERALstring => Constant(readNameRef().toString())
- case LITERALnull => Constant(null)
- case LITERALclass => Constant(readTypeRef())
- case LITERALenum => Constant(readSymbolRef())
- case _ => errorBadSignature("bad constant tag: " + tag)
- }
- }
-
- /** Read children and store them into the corresponding symbol.
- */
- private def readChildren() {
- val tag = readByte()
- assert(tag == CHILDREN)
- val end = readNat() + readIndex
- val target = readSymbolRef()
- while (readIndex != end) target addChild readSymbolRef()
- }
-
- /** Read an annotation argument, which is pickled either
- * as a Constant or a Tree.
- */
- private def readAnnotArg(): Tree = {
- if (peekByte() == TREE) {
- readTree()
- } else {
- val const = readConstant()
- Literal(const).setType(const.tpe)
+ super.noSuchTypeTag(tag, end)
}
}
- /** Read a ClassfileAnnotArg (argument to a classfile annotation)
- */
- private def readClassfileAnnotArg(): ClassfileAnnotArg = peekByte() match {
- case ANNOTINFO =>
- NestedAnnotArg(readAnnotation())
- case ANNOTARGARRAY =>
- readByte()
- val end = readNat() + readIndex
- ArrayAnnotArg(until(end, readClassfileAnnotArgRef).toArray)
- case _ =>
- LiteralAnnotArg(readConstant())
- }
+ override protected def errorMissingRequirement(name: Name, owner: Symbol) =
+ errorMissingRequirement(
+ "reference " + (if (name.isTypeName) "type " else "value ") +
+ name.decode + " of " + owner.tpe.widen + " refers to nonexisting symbol.")
- /** Read an AnnotationInfo. Not to be called directly, use
- * readAnnotation or readSymbolAnnotation
- */
- private def readAnnotationInfo(end: Int): AnnotationInfo = {
- val atp = readTypeRef()
- val args = new ListBuffer[Tree]
- val assocs = new ListBuffer[(Name, ClassfileAnnotArg)]
- while (readIndex != end) {
- val argref = readNat()
- if (isNameEntry(argref))
- assocs += ((at(argref, readName), readClassfileAnnotArgRef))
- else
- args += at(argref, readAnnotArg)
- }
- AnnotationInfo(atp, args.toList, assocs.toList)
- }
-
- /** Read an annotation and as a side effect store it into
- * the symbol it requests. Called at top-level, for all
- * (symbol, annotInfo) entries. */
- private def readSymbolAnnotation() {
- val tag = readByte()
- if (tag != SYMANNOT)
- errorBadSignature("symbol annotation expected ("+ tag +")")
- val end = readNat() + readIndex
- val target = readSymbolRef()
- target.addAnnotation(readAnnotationInfo(end))
- }
-
- /** Read an annotation and return it. Used when unpickling
- * an ANNOTATED(WSELF)tpe or a NestedAnnotArg */
- private def readAnnotation(): AnnotationInfo = {
- val tag = readByte()
- if (tag != ANNOTINFO)
- errorBadSignature("annotation expected (" + tag + ")")
- val end = readNat() + readIndex
- readAnnotationInfo(end)
- }
-
- /* Read an abstract syntax tree */
- private def readTree(): Tree = {
- val outerTag = readByte()
- if (outerTag != TREE)
- errorBadSignature("tree expected (" + outerTag + ")")
- val end = readNat() + readIndex
- val tag = readByte()
- val tpe = if (tag == EMPTYtree) NoType else readTypeRef()
+ def inferMethodAlternative(fun: Tree, argtpes: List[Type], restpe: Type) =
+ typer.infer.inferMethodAlternative(fun, List(), argtpes, restpe)
- // Set by the three functions to follow. If symbol is non-null
- // after the the new tree 't' has been created, t has its Symbol
- // set to symbol; and it always has its Type set to tpe.
- var symbol: Symbol = null
- var mods: Modifiers = null
- var name: Name = null
-
- /** Read a Symbol, Modifiers, and a Name */
- def setSymModsName() {
- symbol = readSymbolRef()
- mods = readModifiersRef()
- name = readNameRef()
- }
- /** Read a Symbol and a Name */
- def setSymName() {
- symbol = readSymbolRef()
- name = readNameRef()
- }
- /** Read a Symbol */
- def setSym() {
- symbol = readSymbolRef()
- }
-
- val t = tag match {
- case EMPTYtree =>
- EmptyTree
-
- case PACKAGEtree =>
- setSym()
- // val discardedSymbol = readSymbolRef() // XXX is symbol intentionally not set?
- val pid = readTreeRef().asInstanceOf[RefTree]
- val stats = until(end, readTreeRef)
- PackageDef(pid, stats)
-
- case CLASStree =>
- setSymModsName()
- val impl = readTemplateRef()
- val tparams = until(end, readTypeDefRef)
- ClassDef(mods, name, tparams, impl)
-
- case MODULEtree =>
- setSymModsName()
- ModuleDef(mods, name, readTemplateRef())
-
- case VALDEFtree =>
- setSymModsName()
- val tpt = readTreeRef()
- val rhs = readTreeRef()
- ValDef(mods, name, tpt, rhs)
-
- case DEFDEFtree =>
- setSymModsName()
- val tparams = times(readNat(), readTypeDefRef)
- val vparamss = times(readNat(), () => times(readNat(), readValDefRef))
- val tpt = readTreeRef()
- val rhs = readTreeRef()
-
- DefDef(mods, name, tparams, vparamss, tpt, rhs)
-
- case TYPEDEFtree =>
- setSymModsName()
- val rhs = readTreeRef()
- val tparams = until(end, readTypeDefRef)
- TypeDef(mods, name, tparams, rhs)
-
- case LABELtree =>
- setSymName()
- val rhs = readTreeRef()
- val params = until(end, readIdentRef)
- LabelDef(name, params, rhs)
-
- case IMPORTtree =>
- setSym()
- val expr = readTreeRef()
- val selectors = until(end, () => {
- val from = readNameRef()
- val to = readNameRef()
- ImportSelector(from, -1, to, -1)
- })
-
- Import(expr, selectors)
-
- case DOCDEFtree =>
- val comment = readConstantRef match {
- case Constant(com: String) => com
- case other => errorBadSignature("Document comment not a string (" + other + ")")
- }
- val definition = readTreeRef()
- DocDef(DocComment(comment, NoPosition), definition)
-
- case TEMPLATEtree =>
- setSym()
- val parents = times(readNat(), readTreeRef)
- val self = readValDefRef()
- val body = until(end, readTreeRef)
-
- Template(parents, self, body)
-
- case BLOCKtree =>
- val expr = readTreeRef()
- val stats = until(end, readTreeRef)
- Block(stats, expr)
-
- case CASEtree =>
- val pat = readTreeRef()
- val guard = readTreeRef()
- val body = readTreeRef()
- CaseDef(pat, guard, body)
-
- case ALTERNATIVEtree =>
- Alternative(until(end, readTreeRef))
-
- case STARtree =>
- Star(readTreeRef())
-
- case BINDtree =>
- setSymName()
- Bind(name, readTreeRef())
-
- case UNAPPLYtree =>
- val fun = readTreeRef()
- val args = until(end, readTreeRef)
- UnApply(fun, args)
-
- case ARRAYVALUEtree =>
- val elemtpt = readTreeRef()
- val trees = until(end, readTreeRef)
- ArrayValue(elemtpt, trees)
-
- case FUNCTIONtree =>
- setSym()
- val body = readTreeRef()
- val vparams = until(end, readValDefRef)
- Function(vparams, body)
-
- case ASSIGNtree =>
- val lhs = readTreeRef()
- val rhs = readTreeRef()
- Assign(lhs, rhs)
-
- case IFtree =>
- val cond = readTreeRef()
- val thenp = readTreeRef()
- val elsep = readTreeRef()
- If(cond, thenp, elsep)
-
- case MATCHtree =>
- val selector = readTreeRef()
- val cases = until(end, readCaseDefRef)
- Match(selector, cases)
-
- case RETURNtree =>
- setSym()
- Return(readTreeRef())
-
- case TREtree =>
- val block = readTreeRef()
- val finalizer = readTreeRef()
- val catches = until(end, readCaseDefRef)
- Try(block, catches, finalizer)
-
- case THROWtree =>
- Throw(readTreeRef())
-
- case NEWtree =>
- New(readTreeRef())
-
- case TYPEDtree =>
- val expr = readTreeRef()
- val tpt = readTreeRef()
- Typed(expr, tpt)
-
- case TYPEAPPLYtree =>
- val fun = readTreeRef()
- val args = until(end, readTreeRef)
- TypeApply(fun, args)
-
- case APPLYtree =>
- val fun = readTreeRef()
- val args = until(end, readTreeRef)
- if (fun.symbol hasFlag OVERLOADED) {
- fun.setType(fun.symbol.info)
- typer.infer.inferMethodAlternative(fun, Nil, args map (_.tpe), tpe)
- }
- Apply(fun, args)
-
- case APPLYDYNAMICtree =>
- setSym()
- val qual = readTreeRef()
- val args = until(end, readTreeRef)
- ApplyDynamic(qual, args)
-
- case SUPERtree =>
- setSym()
- val qual = readNameRef()
- val mix = readNameRef()
- Super(qual, mix)
-
- case THIStree =>
- setSym()
- This(readNameRef())
-
- case SELECTtree =>
- setSym()
- val qualifier = readTreeRef()
- val selector = readNameRef()
- Select(qualifier, selector)
-
- case IDENTtree =>
- setSymName()
- Ident(name)
-
- case LITERALtree =>
- Literal(readConstantRef())
-
- case TYPEtree =>
- TypeTree()
-
- case ANNOTATEDtree =>
- val annot = readTreeRef()
- val arg = readTreeRef()
- Annotated(annot, arg)
-
- case SINGLETONTYPEtree =>
- SingletonTypeTree(readTreeRef())
-
- case SELECTFROMTYPEtree =>
- val qualifier = readTreeRef()
- val selector = readNameRef()
- SelectFromTypeTree(qualifier, selector)
-
- case COMPOUNDTYPEtree =>
- CompoundTypeTree(readTemplateRef())
-
- case APPLIEDTYPEtree =>
- val tpt = readTreeRef()
- val args = until(end, readTreeRef)
- AppliedTypeTree(tpt, args)
-
- case TYPEBOUNDStree =>
- val lo = readTreeRef()
- val hi = readTreeRef()
- TypeBoundsTree(lo, hi)
-
- case EXISTENTIALTYPEtree =>
- val tpt = readTreeRef()
- val whereClauses = until(end, readTreeRef)
- ExistentialTypeTree(tpt, whereClauses)
-
- case _ =>
- errorBadSignature("unknown tree type (" + tag + ")")
- }
-
- if (symbol == null) t setType tpe
- else t setSymbol symbol setType tpe
- }
-
- def readModifiers(): Modifiers = {
- val tag = readNat()
- if (tag != MODIFIERS)
- errorBadSignature("expected a modifiers tag (" + tag + ")")
- val end = readNat() + readIndex
- val pflagsHi = readNat()
- val pflagsLo = readNat()
- val pflags = (pflagsHi.toLong << 32) + pflagsLo
- val flags = pickledToRawFlags(pflags)
- val privateWithin = readNameRef()
- Modifiers(flags, privateWithin, Nil, new Map.EmptyMap)
- }
-
- /* Read a reference to a pickled item */
- private def readNameRef(): Name = at(readNat(), readName)
- private def readSymbolRef(): Symbol = at(readNat(), readSymbol)
- private def readTypeRef(): Type = at(readNat(), readType)
- private def readConstantRef(): Constant = at(readNat(), readConstant)
- private def readAnnotArgRef(): Tree =
- at(readNat(), readAnnotArg)
- private def readClassfileAnnotArgRef(): ClassfileAnnotArg =
- at(readNat(), readClassfileAnnotArg)
- private def readAnnotationRef(): AnnotationInfo =
- at(readNat(), readAnnotation)
- private def readModifiersRef(): Modifiers =
- at(readNat(), readModifiers)
- private def readTreeRef(): Tree =
- at(readNat(), readTree)
-
- private def readTemplateRef(): Template =
- readTreeRef() match {
- case templ:Template => templ
- case other =>
- errorBadSignature("expected a template (" + other + ")")
- }
- private def readCaseDefRef(): CaseDef =
- readTreeRef() match {
- case tree:CaseDef => tree
- case other =>
- errorBadSignature("expected a case def (" + other + ")")
- }
- private def readValDefRef(): ValDef =
- readTreeRef() match {
- case tree:ValDef => tree
- case other =>
- errorBadSignature("expected a ValDef (" + other + ")")
- }
- private def readIdentRef(): Ident =
- readTreeRef() match {
- case tree:Ident => tree
- case other =>
- errorBadSignature("expected an Ident (" + other + ")")
- }
- private def readTypeDefRef(): TypeDef =
- readTreeRef() match {
- case tree:TypeDef => tree
- case other =>
- errorBadSignature("expected an TypeDef (" + other + ")")
- }
-
- private def errorBadSignature(msg: String) =
- throw new RuntimeException("malformed Scala signature of " + classRoot.name + " at " + readIndex + "; " + msg)
-
- private def errorMissingRequirement(msg: String) =
- if (settings.debug.value) errorBadSignature(msg)
- else throw new IOException("class file needed by "+classRoot.name+" is missing.\n"+msg)
+ def newLazyTypeRef(i: Int): LazyType = new LazyTypeRef(i)
+ def newLazyTypeRefAndAlias(i: Int, j: Int): LazyType = new LazyTypeRefAndAlias(i, j)
+ /** A lazy type which when completed returns type at index `i`. */
private class LazyTypeRef(i: Int) extends LazyType {
private val definedAtRunId = currentRunId
private val p = phase
@@ -786,6 +64,9 @@ abstract class UnPickler {
override def load(sym: Symbol) { complete(sym) }
}
+ /** A lazy type which when completed returns type at index `i` and sets alias
+ * of completed symbol to symbol at index `j`.
+ */
private class LazyTypeRefAndAlias(i: Int, j: Int) extends LazyTypeRef(i) {
override def complete(sym: Symbol) {
super.complete(sym)
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
index 7cea294ec8..4d9c0ffc92 100644
--- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
@@ -13,7 +13,6 @@ import java.io.IOException
import ch.epfl.lamp.compiler.msil.{Type => MSILType, Attribute => MSILAttribute, _}
import scala.collection.mutable.{HashMap, HashSet}
-import scala.tools.nsc.util.{Position, NoPosition}
import classfile.UnPickler
/**
@@ -43,7 +42,7 @@ abstract class TypeParser {
def parse(typ: MSILType, root: Symbol) {
- def handleError(e: Exception) = {
+ def handleError(e: Throwable) = {
if (settings.debug.value) e.printStackTrace() //debug
throw new IOException("type '" + typ.FullName + "' is broken\n(" + e.getMessage() + ")")
}
@@ -51,11 +50,11 @@ abstract class TypeParser {
busy = true
if (root.isModule) {
- this.clazz = root.linkedClassOfModule
+ this.clazz = root.companionClass
this.staticModule = root
} else {
this.clazz = root
- this.staticModule = root.linkedModuleOfClass
+ this.staticModule = root.companionModule
}
try {
parseClass(typ)
@@ -120,8 +119,8 @@ abstract class TypeParser {
staticDefs.enter(nclazz)
staticDefs.enter(nmodule)
- assert(nclazz.linkedModuleOfClass == nmodule, nmodule)
- assert(nmodule.linkedClassOfModule == nclazz, nclazz)
+ assert(nclazz.companionModule == nmodule, nmodule)
+ assert(nmodule.companionClass == nclazz, nclazz)
}
val fields = typ.getFields()
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index 956db2767e..92f117428a 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -116,7 +116,7 @@ abstract class AddInterfaces extends InfoTransform {
* </p>
* <ul>
* <li>
- * for every interface member of <code>iface</code> its implemention
+ * for every interface member of <code>iface</code> its implementation
* method, if one is needed.
* </li>
* <li>
@@ -141,7 +141,7 @@ abstract class AddInterfaces extends InfoTransform {
for (sym <- ifaceDecls.iterator) {
if (isInterfaceMember(sym)) {
if (needsImplMethod(sym)) {
- val impl = sym.cloneSymbol(implClass).setInfo(sym.info).resetFlag(lateDEFERRED)
+ val impl = sym.cloneSymbol(implClass).resetFlag(lateDEFERRED)
if (currentRun.compiles(implClass)) implMethodMap(sym) = impl
decls enter impl
sym setFlag lateDEFERRED
@@ -243,7 +243,7 @@ abstract class AddInterfaces extends InfoTransform {
tree.symbol = implMethod
new ChangeOwnerAndReturnTraverser(ifaceMethod, implMethod)(tree)
case None =>
- throw new Error("implMethod missing for " + ifaceMethod)
+ abort("implMethod missing for " + ifaceMethod)
}
private def implMemberDef(tree: Tree): Tree =
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 81fe911328..e6811cf497 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -9,7 +9,6 @@ package transform
import symtab._
import Flags._
-import scala.tools.nsc.util.Position
import scala.collection.mutable.{ListBuffer, HashMap}
abstract class CleanUp extends Transform with ast.TreeDSL {
@@ -59,6 +58,11 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* not part of it, as opposed to isValueClass in definitions. scala.Int is
* a value class, java.lang.Integer is not. */
def isJavaValueClass(sym: Symbol) = boxedClass contains sym
+ def isJavaValueType(tp: Type) = isJavaValueClass(tp.typeSymbol)
+
+ /** The boxed type if it's a primitive; identity otherwise.
+ */
+ def toBoxedType(tp: Type) = if (isJavaValueType(tp)) boxedClass(tp.typeSymbol).tpe else tp
override def transform(tree: Tree): Tree = tree match {
@@ -89,7 +93,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* - The type-checker has prevented dynamic applies on methods which
* parameter's erased types are not statically known at the call site.
* This is necessary to allow dispatching the call to the correct
- * method (dispatching on paramters is static in Scala). In practice,
+ * method (dispatching on parameters is static in Scala). In practice,
* this limitation only arises when the called method is defined as a
* refinement, where the refinement defines a parameter based on a
* type variable. */
@@ -327,20 +331,62 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* a dynamic call will box them as a side-effect. */
/* ### CALLING THE APPLY ### */
- def callAsReflective(paramTypes: List[Type], resType: Type, structResType: Type): Tree = localTyper typed {
- def fixResult(tree: Tree): Tree = localTyper typed {
- structResType.typeSymbol match {
- case UnitClass => BLOCK(tree, REF(BoxedUnit_UNIT))
- case ObjectClass => tree
- case _ => tree AS_ATTR structResType
+ def callAsReflective(paramTypes: List[Type], resType: Type): Tree = {
+ /* Some info about the type of the method being called. */
+ val methSym = ad.symbol
+ val boxedResType = toBoxedType(resType) // Int -> Integer
+ val resultSym = boxedResType.typeSymbol
+ // If this is a primitive method type (like '+' in 5+5=10) then the
+ // parameter types and the (unboxed) result type should all be primitive types,
+ // and the method name should be in the primitive->structural map.
+ def isJavaValueMethod = (
+ (resType :: paramTypes forall isJavaValueType) && // issue #1110
+ (getPrimitiveReplacementForStructuralCall isDefinedAt methSym.name)
+ )
+ // Erasure lets Unit through as Unit, but a method returning Any will have an
+ // erased return type of Object and should also allow Unit.
+ def isDefinitelyUnit = (resultSym == UnitClass)
+ def isMaybeUnit = (resultSym == ObjectClass) || isDefinitelyUnit
+ // If there's any chance this signature could be met by an Array.
+ val isArrayMethodSignature = {
+ def typesMatchApply = paramTypes match {
+ case List(tp) => tp <:< IntClass.tpe
+ case _ => false
+ }
+ def typesMatchUpdate = paramTypes match {
+ case List(tp1, tp2) => (tp1 <:< IntClass.tpe) && isMaybeUnit
+ case _ => false
}
+
+ (methSym.name == nme.length && params.isEmpty) ||
+ (methSym.name == nme.clone_ && params.isEmpty) ||
+ (methSym.name == nme.apply && typesMatchApply) ||
+ (methSym.name == nme.update && typesMatchUpdate)
}
- val qualSym = qual.tpe.typeSymbol
- val methSym = ad.symbol
- def args = qual :: params
+
+ /* Some info about the argument at the call site. */
+ val qualSym = qual.tpe.typeSymbol
+ val args = qual :: params
+ def isDefinitelyArray = (qualSym == ArrayClass)
+ def isMaybeArray = (qualSym == ObjectClass) || isDefinitelyArray
+ def isMaybeBoxed = platform isMaybeBoxed qualSym
+
+ // This is complicated a bit by trying to handle Arrays correctly.
+ // Under normal circumstances if the erased return type is Object then
+ // we're not going to box it to Unit, but that is the situation with
+ // a signature like def f(x: { def update(x: Int, y: Long): Any })
+ //
+ // However we only want to do that boxing if it has been determined
+ // to be an Array and a method returning Unit. Were it not for this, fixResult
+ // could be called in one place; instead it is called separately from each of the
+ // unconditional outcomes (genValueCall, genArrayCall, genDefaultCall).
+ def fixResult(tree: Tree, mustBeUnit: Boolean = false) =
+ if (mustBeUnit || resultSym == UnitClass) BLOCK(tree, REF(BoxedUnit_UNIT)) // boxed unit
+ else if (resultSym == ObjectClass) tree // no cast necessary
+ else tree AS_ATTR boxedResType // cast to expected type
/** Normal non-Array call */
- def defaultCall = {
+ def genDefaultCall = {
// reflective method call machinery
val invokeName = MethodClass.tpe member nme.invoke_ // reflect.Method.invoke(...)
def cache = REF(reflectiveMethodCache(ad.symbol.name.toString, paramTypes)) // cache Symbol
@@ -354,44 +400,40 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
def catchBody = Throw(Apply(Select(Ident(invokeExc), nme.getCause), Nil))
// try { method.invoke } catch { case e: InvocationTargetExceptionClass => throw e.getCause() }
- fixResult( TRY (invocation) CATCH { CASE (catchVar) ==> catchBody } ENDTRY )
+ fixResult(TRY (invocation) CATCH { CASE (catchVar) ==> catchBody } ENDTRY)
}
- def useValueOperator =
- isMaybeBoxed(qualSym) && // may be a boxed value class
- (getPrimitiveReplacementForStructuralCall isDefinedAt methSym.name) &&
- ((resType :: paramTypes) forall (x => isJavaValueClass(x.typeSymbol))) // issue #1110
-
- def isArrayMethodSignature =
- (methSym.name == nme.length && params.isEmpty) ||
- (methSym.name == nme.update && (structResType.typeSymbol eq UnitClass)) ||
- (methSym.name == nme.apply && params.size == 1) ||
- (methSym.name == nme.clone_ && params.isEmpty)
-
- def isDefinitelyArray = isArrayMethodSignature && (qualSym == ArrayClass)
- def isMaybeArray = isArrayMethodSignature && (qualSym == ObjectClass) // precondition: !isDefinitelyArray
-
- def genArrayCall = methSym.name match {
- case nme.length => REF(boxMethod(IntClass)) APPLY (REF(arrayLengthMethod) APPLY args)
- case nme.update => REF(arrayUpdateMethod) APPLY List(args(0), (REF(unboxMethod(IntClass)) APPLY args(1)), args(2))
- case nme.apply => REF(arrayApplyMethod) APPLY List(args(0), (REF(unboxMethod(IntClass)) APPLY args(1)))
- case nme.clone_ => REF(arrayCloneMethod) APPLY List(args(0))
- }
- def genArrayTest = {
- def oneTest(s: Symbol) = qual IS_OBJ arrayType(s.tpe)
- OR((ObjectClass :: ScalaValueClasses filterNot (_ eq UnitClass)) map oneTest: _*)
+ /** A possible primitive method call, represented by methods in BoxesRunTime. */
+ def genValueCall(operator: Symbol) = fixResult(REF(operator) APPLY args)
+ def genValueCallWithTest = {
+ val (operator, test) = getPrimitiveReplacementForStructuralCall(methSym.name)
+ IF (test) THEN genValueCall(operator) ELSE genDefaultCall
}
- val callCode =
- if (useValueOperator) {
- val (operator, test) = getPrimitiveReplacementForStructuralCall(methSym.name)
- IF (test) THEN fixResult(REF(operator) APPLY args) ELSE defaultCall
- }
- else if (isDefinitelyArray) genArrayCall
- else if (isMaybeArray) IF (genArrayTest) THEN genArrayCall ELSE defaultCall
- else defaultCall
+ /** A native Array call. */
+ def genArrayCall = fixResult(
+ methSym.name match {
+ case nme.length => REF(boxMethod(IntClass)) APPLY (REF(arrayLengthMethod) APPLY args)
+ case nme.update => REF(arrayUpdateMethod) APPLY List(args(0), (REF(unboxMethod(IntClass)) APPLY args(1)), args(2))
+ case nme.apply => REF(arrayApplyMethod) APPLY List(args(0), (REF(unboxMethod(IntClass)) APPLY args(1)))
+ case nme.clone_ => REF(arrayCloneMethod) APPLY List(args(0))
+ },
+ mustBeUnit = methSym.name == nme.update
+ )
- localTyper typed callCode
+ /** A conditional Array call, when we can't determine statically if the argument is
+ * an Array, but the structural type method signature is consistent with an Array method
+ * so we have to generate both kinds of code.
+ */
+ def genArrayCallWithTest =
+ IF ((qual GETCLASS()) DOT nme.isArray) THEN genArrayCall ELSE genDefaultCall
+
+ localTyper typed (
+ if (isMaybeBoxed && isJavaValueMethod) genValueCallWithTest
+ else if (isArrayMethodSignature && isDefinitelyArray) genArrayCall
+ else if (isArrayMethodSignature && isMaybeArray) genArrayCallWithTest
+ else genDefaultCall
+ )
}
if (settings.refinementMethodDispatch.value == "invoke-dynamic") {
@@ -415,8 +457,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/* This creates the tree that does the reflective call (see general comment
* on the apply-dynamic tree for its format). This tree is simply composed
- * of three succesive calls, first to getClass on the callee, then to
- * getMethod on the classs, then to invoke on the method.
+ * of three successive calls, first to getClass on the callee, then to
+ * getMethod on the class, then to invoke on the method.
* - getMethod needs an array of classes for choosing one amongst many
* overloaded versions of the method. This is provided by paramTypeClasses
* and must be done on the static type as Scala's dispatching is static on
@@ -438,10 +480,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val sym = currentOwner.newValue(ad.pos, mkTerm("qual")) setInfo qual0.tpe
qual = REF(sym)
- def structResType = if (isJavaValueClass(resType.typeSymbol)) boxedClass(resType.typeSymbol).tpe else resType
BLOCK(
VAL(sym) === qual0,
- callAsReflective(mparams map (_.tpe), resType, structResType)
+ callAsReflective(mparams map (_.tpe), resType)
)
}
}
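
The hunks above lower a structural-type call into reflection: getClass on the receiver, getMethod on that class (selected by the static parameter classes), then invoke, with InvocationTargetException unwrapped and the result fixed up (boxed to Unit or cast to the boxed result type). A minimal hand-written sketch of that shape, assuming a receiver that really has a no-argument length method; the names here are illustrative, not compiler output:

    import java.lang.reflect.InvocationTargetException

    object StructuralCallSketch {
      // Roughly what a call like (x: { def length: Int }).length becomes.
      def structuralLength(x: AnyRef): Int =
        try {
          val m = x.getClass.getMethod("length")                // overload chosen via the static parameter classes
          m.invoke(x).asInstanceOf[java.lang.Integer].intValue  // invoke returns AnyRef; cast and unbox the result
        } catch {
          case e: InvocationTargetException => throw e.getCause // mirror the generated catch: rethrow the real cause
        }
    }
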
@@ -600,7 +641,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/* Returns the symbol and the tree for the symbol field interning a reference to a symbol 'symname'.
* If it doesn't exist, i.e. the symbol is encountered the first time,
- * it creates a new static field definition and initalization and returns it.
+ * it creates a new static field definition and initialization and returns it.
*/
private def getSymbolStaticField(pos: Position, symname: String, rhs: Tree, tree: Tree): (Symbol, Tree, Tree) =
symbolStaticFields.getOrElseUpdate(symname, {
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index 9bb7200502..7169516560 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -217,7 +217,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
// Could symbol's definition be omitted, provided it is not accessed?
// This is the case if the symbol is defined in the current class, and
// ( the symbol is an object private parameter accessor field, or
- // the symbol is an outer accessor of a final class which does not override another outer accesser. )
+ // the symbol is an outer accessor of a final class which does not override another outer accessor. )
def maybeOmittable(sym: Symbol) =
(sym.owner == clazz &&
((sym hasFlag PARAMACCESSOR) && sym.isPrivateLocal ||
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 7722939aaf..ea759b30f5 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -10,7 +10,7 @@ package transform
import scala.tools.nsc.symtab.classfile.ClassfileConstants._
import scala.collection.mutable.{HashMap,ListBuffer}
import scala.collection.immutable.Set
-import scala.tools.nsc.util.Position
+import scala.util.control.ControlThrowable
import symtab._
import Flags._
@@ -18,7 +18,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
{
import global._ // the global environment
import definitions._ // standard classes and methods
- // @S: XXX: why is this here? earsure is a typer, if you comment this
+ // @S: XXX: why is this here? erasure is a typer, if you comment this
// out erasure still works, uses its own typed methods.
lazy val typerXXX = this.typer
import typerXXX.{typed} // methods to type trees
@@ -117,15 +117,15 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
*/
val erasure = new TypeMap {
- // Compute the erasure of the intersection type with given `parents` according to new spec.
- private def intersectionErasure(parents: List[Type]): Type =
- if (parents.isEmpty) erasedTypeRef(ObjectClass)
- else apply {
+ // Compute the dominant part of the intersection type with given `parents` according to new spec.
+ def intersectionDominator(parents: List[Type]): Type =
+ if (parents.isEmpty) ObjectClass.tpe
+ else {
val psyms = parents map (_.typeSymbol)
if (psyms contains ArrayClass) {
// treat arrays specially
arrayType(
- intersectionErasure(
+ intersectionDominator(
parents filter (_.typeSymbol == ArrayClass) map (_.typeArgs.head)))
} else {
// implement new spec for erasure of refined types.
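
As a rough illustration of the intersectionDominator introduced here (a sketch under the usual reading of the new erasure spec, not the compiler's exact algorithm): a compound type erases to its dominant parent, typically the first parent that is a proper class rather than a trait, with arrays handled specially as the code shows.

    object IntersectionErasureSketch {
      trait Logging
      class Service

      // Service with Logging erases to plain Service: Service is the dominant
      // (non-trait) parent, so the JVM-level signature is handle(Service),
      // while Logging survives only in the generic signature.
      def handle(s: Service with Logging): Unit = ()
    }
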
@@ -152,7 +152,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
else typeRef(apply(pre), sym, args map this)
else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass) erasedTypeRef(ObjectClass)
else if (sym == UnitClass) erasedTypeRef(BoxedUnitClass)
- else if (sym.isRefinementClass) intersectionErasure(tp.parents)
+ else if (sym.isRefinementClass) apply(intersectionDominator(tp.parents))
else if (sym.isClass) typeRef(apply(rebindInnerClass(pre, sym)), sym, List()) // #2585
else apply(sym.info) // alias type or abstract type
case PolyType(tparams, restpe) =>
@@ -169,7 +169,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
else
apply(restpe))
case RefinedType(parents, decls) =>
- intersectionErasure(parents)
+ apply(intersectionDominator(parents))
case AnnotatedType(_, atp, _) =>
apply(atp)
case ClassInfoType(parents, decls, clazz) =>
@@ -192,7 +192,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
traverse(st.supertype)
case TypeRef(pre, sym, args) =>
if (sym == ArrayClass) args foreach traverse
- else if (sym.isTypeParameterOrSkolem || sym.isExistential || !args.isEmpty) result = true
+ else if (sym.isTypeParameterOrSkolem || sym.isExistentiallyBound || !args.isEmpty) result = true
else if (sym.isClass) traverse(rebindInnerClass(pre, sym)) // #2585
else if (!sym.owner.isPackageClass) traverse(pre)
case PolyType(_, _) | ExistentialType(_, _) =>
@@ -254,7 +254,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
}
}
def classSig: String =
- "L"+atPhase(currentRun.icodePhase)(sym.fullNameString + global.genJVM.moduleSuffix(sym)).replace('.', '/')
+ "L"+atPhase(currentRun.icodePhase)(sym.fullName + global.genJVM.moduleSuffix(sym)).replace('.', '/')
def classSigSuffix: String =
"."+sym.name
if (sym == ArrayClass)
@@ -374,7 +374,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
else if (sym == Object_isInstanceOf || sym == ArrayClass)
PolyType(sym.info.typeParams, erasure(sym.info.resultType))
else if (sym.isAbstractType)
- mkTypeBounds(WildcardType, WildcardType)
+ TypeBounds(WildcardType, WildcardType)
else if (sym.isTerm && sym.owner == ArrayClass) {
if (sym.isClassConstructor)
tp match {
@@ -432,6 +432,24 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
override def newTyper(context: Context) = new Eraser(context)
+ /** An extractor object for boxed expressions
+ object Boxed {
+ def unapply(tree: Tree): Option[Tree] = tree match {
+ case LabelDef(name, params, Boxed(rhs)) =>
+ Some(treeCopy.LabelDef(tree, name, params, rhs) setType rhs.tpe)
+ case Select(_, _) if tree.symbol == BoxedUnit_UNIT =>
+ Some(Literal(()) setPos tree.pos setType UnitClass.tpe)
+ case Block(List(unboxed), ret @ Select(_, _)) if ret.symbol == BoxedUnit_UNIT =>
+ Some(if (unboxed.tpe.typeSymbol == UnitClass) tree
+ else Block(List(unboxed), Literal(()) setPos tree.pos setType UnitClass.tpe))
+ case Apply(fn, List(unboxed)) if isBox(fn.symbol) =>
+ Some(unboxed)
+ case _ =>
+ None
+ }
+ }
+ */
+
/** The modifier typer which retypes with erased types. */
class Eraser(context: Context) extends Typer(context) {
@@ -458,6 +476,11 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
* @return the unboxed tree
*/
private def unbox(tree: Tree, pt: Type): Tree = tree match {
+/*
+ case Boxed(unboxed) =>
+ println("unbox shorten: "+tree) // this never seems to kick in during build and test; therefore disabled.
+ adaptToType(unboxed, pt)
+ */
case LabelDef(name, params, rhs) =>
val rhs1 = unbox(rhs, pt)
treeCopy.LabelDef(tree, name, params, rhs1) setType rhs1.tpe
@@ -564,7 +587,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
//Console.println("adaptMember: " + tree);
tree match {
case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List()) if tree.symbol == Any_asInstanceOf =>
- val qual1 = typedQualifier(qual)
+ val qual1 = typedQualifier(qual, NOmode, ObjectClass.tpe) // need to have an expected type, see #3037
val qualClass = qual1.tpe.typeSymbol
val targClass = targ.tpe.typeSymbol
/*
@@ -585,7 +608,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
else if (tree.symbol.owner == AnyClass)
adaptMember(atPos(tree.pos)(Select(qual, getMember(ObjectClass, name))))
else {
- var qual1 = typedQualifier(qual);
+ var qual1 = typedQualifier(qual)
if ((isValueClass(qual1.tpe.typeSymbol) && !isUnboxedValueMember(tree.symbol)))
qual1 = box(qual1)
else if (!isValueClass(qual1.tpe.typeSymbol) && isUnboxedValueMember(tree.symbol))
@@ -634,7 +657,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
Console.println("exception when typing " + tree)
Console.println(er.msg + " in file " + context.owner.sourceFile)
er.printStackTrace
- throw new Error
+ abort()
}
def adaptCase(cdef: CaseDef): CaseDef = {
val body1 = adaptToType(cdef.body, tree1.tpe)
@@ -726,7 +749,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
val opc = new overridingPairs.Cursor(root) {
override def exclude(sym: Symbol): Boolean =
(!sym.isTerm || sym.hasFlag(PRIVATE) || super.exclude(sym)
- // specialized members have no type history before 'specialize', causing duble def errors for curried defs
+ // specialized members have no type history before 'specialize', causing double def errors for curried defs
|| !sym.hasTypeAt(currentRun.refchecksPhase.id))
override def matches(sym1: Symbol, sym2: Symbol): Boolean =
@@ -944,6 +967,10 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
tree,
SelectFromArray(qual, name, erasure(qual.tpe)).copyAttrs(fn),
args)
+
+ case Apply(fn @ Select(qual, _), Nil) if (fn.symbol == Any_## || fn.symbol == Object_##) =>
+ Apply(gen.mkAttributedRef(scalaRuntimeHash), List(qual))
+
case Apply(fn, args) =>
if (fn.symbol == Any_asInstanceOf)
fn match {
@@ -1033,18 +1060,13 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
tpt.tpe = erasure(tree.symbol.tpe).resultType
result
case _ =>
- case class MyError(count : Int, ex : AssertionError) extends Error(ex.getMessage)
- try {
- super.transform(tree1) setType null
- } catch {
- case e @ MyError(n, ex) if n > 5 => throw e
- case MyError(n,ex) =>
+ case class LoopControl(count: Int, ex : AssertionError) extends Throwable(ex.getMessage) with ControlThrowable
+
+ try super.transform(tree1) setType null
+ catch {
+ case LoopControl(n, ex) if n <= 5 =>
Console.println(tree1)
- throw MyError(n + 1, ex)
-// case ex : AssertionError =>
-// Console.println(tree1)
-// throw MyError(0, ex)
-// case ex => throw ex
+ throw LoopControl(n + 1, ex)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index defdeca9c0..dad654c967 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -63,7 +63,7 @@ abstract class ExplicitOuter extends InfoTransform
}
def outerAccessor(clazz: Symbol): Symbol = {
- val firstTry = clazz.info.decl(clazz expandedName nme.OUTER)
+ val firstTry = clazz.info.decl(nme.expandedName(nme.OUTER, clazz))
if (firstTry != NoSymbol && firstTry.outerSource == clazz) firstTry
else clazz.info.decls find (_.outerSource == clazz) getOrElse NoSymbol
}
@@ -84,7 +84,7 @@ abstract class ExplicitOuter extends InfoTransform
* <p>
* Add an outer accessor <code>$outer$$C</code> to every inner class
* with fully qualified name <code>C</code> that is not an interface.
- * The outer accesssor is abstract for traits, concrete for other
+ * The outer accessor is abstract for traits, concrete for other
* classes.
* </p>
* <p>
@@ -105,12 +105,9 @@ abstract class ExplicitOuter extends InfoTransform
def transformInfo(sym: Symbol, tp: Type): Type = tp match {
case MethodType(params, restpe1) =>
val restpe = transformInfo(sym, restpe1)
- if (sym.owner.isTrait && ((sym hasFlag SUPERACCESSOR) || sym.isModule)) { // 5
+ if (sym.owner.isTrait && ((sym hasFlag (ACCESSOR | SUPERACCESSOR)) || sym.isModule)) { // 5
sym.makeNotPrivate(sym.owner)
}
- // moved form the term transformer
- if (sym.owner.isTrait && (sym hasFlag (ACCESSOR | SUPERACCESSOR)))
- sym.makeNotPrivate(sym.owner); //(2)
if (sym.owner.isTrait && (sym hasFlag PROTECTED)) sym setFlag notPROTECTED // 6
if (sym.isClassConstructor && isInner(sym.owner)) { // 1
val p = sym.newValueParameter(sym.pos, "arg" + nme.OUTER)
@@ -154,7 +151,14 @@ abstract class ExplicitOuter extends InfoTransform
if (restp eq restp1) tp else PolyType(tparams, restp1)
case _ =>
- tp
+ // Local fields of traits need to be unconditionally unprivatized.
+ // Reason: Those fields might need to be unprivatized if referenced by an inner class.
+ // On the other hand, mixing in the trait into a separately compiled
+ // class needs to have a common naming scheme, independently of whether
+ // the field was accessed from an inner class or not. See #2946
+ if (sym.owner.isTrait && (sym hasFlag LOCAL) && (sym.getter(sym.owner) == NoSymbol))
+ sym.makeNotPrivate(sym.owner)
+ tp
}
/** A base class for transformers that maintain <code>outerParam</code>
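
A small sketch of the #2946 situation the new comment describes (hypothetical names): a trait's object-private field may be referenced from an inner class, and a class mixing the trait in may be compiled separately, so the unprivatized (mangled) field name has to be chosen the same way in both cases.

    trait Counter {
      private[this] var count = 0                               // local field of the trait
      class Ticker { def tick(): Int = { count += 1; count } }  // inner-class access forces unprivatization
    }

    class Clock extends Counter  // possibly compiled separately; must agree on the mangled field name
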
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 30fcc362d4..b24b263b7a 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -11,7 +11,6 @@ import symtab._
import Flags._
import util.TreeSet
import scala.collection.mutable.{HashMap, LinkedHashMap, ListBuffer}
-import scala.tools.nsc.util.{Position, NoPosition}
abstract class LambdaLift extends InfoTransform {
import global._
@@ -151,7 +150,7 @@ abstract class LambdaLift extends InfoTransform {
// The param symbol in the MethodType should not be renamed, only the symbol in scope. This way,
// parameter names for named arguments are not changed. Example: without cloning the MethodType,
// def closure(x: Int) = { () => x }
- // would have the signatrue
+ // would have the signature
// closure: (x$1: Int)() => Int
if (sym.hasFlag(PARAM) && sym.owner.info.paramss.exists(_.contains(sym)))
sym.owner.setInfo(sym.owner.info.cloneInfo(sym.owner))
@@ -429,7 +428,7 @@ abstract class LambdaLift extends InfoTransform {
override def transformUnit(unit: CompilationUnit) {
computeFreeVars
atPhase(phase.next)(super.transformUnit(unit))
- assert(liftedDefs.size == 0, liftedDefs.keysIterator.toList)
+ assert(liftedDefs.size == 0, liftedDefs.keys.toList)
}
} // class LambdaLifter
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index 03339163a1..4cadc66af1 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -95,13 +95,11 @@ abstract class LazyVals extends Transform with ast.TreeDSL {
val bmps = bitmaps(methSym) map (ValDef(_, ZERO))
- // Martin to Iulian: Don't we need to compare lengths here?
- def isMatch(params: List[Ident]) = (params.tail, methSym.tpe.params).zipped forall (_.tpe == _.tpe)
+ def isMatch(params: List[Ident]) = (params.tail corresponds methSym.tpe.params)(_.tpe == _.tpe) // @PP: corresponds
if (bmps.isEmpty) rhs else rhs match {
case Block(assign, l @ LabelDef(name, params, rhs1))
if name.toString == ("_" + methSym.name) && isMatch(params) =>
- val sym = l.symbol
Block(assign, treeCopy.LabelDef(l, name, params, typed(prependStats(bmps, rhs1))))
case _ => prependStats(bmps, rhs)
@@ -127,7 +125,7 @@ abstract class LazyVals extends Transform with ast.TreeDSL {
* }
* where bitmap$n is an int value acting as a bitmap of initialized values. Here
* 'n' is (offset / 32) and the MASK is (1 << (offset % 32)). If the value has type
- * unit, no field is used to chache the value, so the resulting code is:
+ * unit, no field is used to cache the value, so the resulting code is:
* {
* if ((bitmap$n & MASK) == 0) {
* <rhs>;
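
A worked instance of the bitmap arithmetic in the comment above (the offset is hypothetical, not tied to any particular class):

    object LazyBitmapArithmetic {
      val offset      = 34                  // say this is the 35th lazy value of the class
      val bitmapIndex = offset / 32         // 1  -> the value is tracked in bitmap$1
      val mask        = 1 << (offset % 32)  // 1 << 2 == 4, the bit tested and set for this value
    }
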
diff --git a/src/compiler/scala/tools/nsc/transform/LiftCode.scala b/src/compiler/scala/tools/nsc/transform/LiftCode.scala
index 574ec23cdb..951fa53041 100644
--- a/src/compiler/scala/tools/nsc/transform/LiftCode.scala
+++ b/src/compiler/scala/tools/nsc/transform/LiftCode.scala
@@ -79,7 +79,7 @@ abstract class LiftCode extends Transform with Reifiers {
gen.mkAttributedRef(definitions.getModule(name))
else {
val name = className(c)
- if (name.length() == 0) throw new Error("don't know how to inject " + value)
+ if (name.length() == 0) abort("don't know how to inject " + value)
val injectedArgs = new ListBuffer[Tree]
for (i <- 0 until c.productArity)
injectedArgs += inject(c.productElement(i))
@@ -103,7 +103,7 @@ abstract class LiftCode extends Transform with Reifiers {
case null =>
gen.mkAttributedRef(definitions.getModule("scala.reflect.NoType"))
case _ =>
- throw new Error("don't know how to inject " + value)
+ abort("don't know how to inject " + value)
}
}
} // Injector
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 8303ff592a..85f71cabc9 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -9,7 +9,6 @@ package transform
import symtab._
import Flags._
-import scala.tools.nsc.util.{Position,NoPosition}
import collection.mutable.{ListBuffer, HashMap}
abstract class Mixin extends InfoTransform with ast.TreeDSL {
@@ -20,7 +19,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
/** The name of the phase: */
val phaseName: String = "mixin"
- /** The phase might set the fiollowing new flags: */
+ /** The phase might set the following new flags: */
override def phaseNewFlags: Long = lateMODULE | notABSTRACT
/** This map contains a binding (class -> info) if
@@ -148,8 +147,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* only once per class. The mixedin flag is used to remember whether late
* members have been added to an interface.
* - lazy fields don't get a setter.
- *
- * @param clazz ...
*/
def addLateInterfaceMembers(clazz: Symbol) {
if ((treatedClassInfos get clazz) != Some(clazz.info)) {
@@ -176,7 +173,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
setter.setInfo(MethodType(setter.newSyntheticValueParams(List(field.info)), UnitClass.tpe))
if (needsExpandedSetterName(field)) {
//println("creating expanded setter from "+field)
- setter.name = clazz.expandedSetterName(setter.name)
+ setter.name = nme.expandedSetterName(setter.name, clazz)
}
setter
}
@@ -213,7 +210,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - if a member M of T is forwarded to the implementation class, add
* a forwarder for M unless one exists already.
* The alias of the forwarder is the static member it forwards to.
- * - for every abstract accessor in T, add a field and an implementation for that acessor
+ * - for every abstract accessor in T, add a field and an implementation for that accessor
* - for every super accessor in T, add an implementation of that accessor
* - for every module in T, add a module
*/
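
A compact sketch of the additions that list describes, for one concrete trait member (illustrative only; Greeter$class follows the 2.8-era implementation-class naming, which may differ in detail):

    trait Greeter {
      val greeting: String = "hi"     // concrete val: the interface only keeps an abstract accessor
      def greet(): String = greeting  // concrete method: its body moves to the implementation class
    }

    class Host extends Greeter
    // After the mixin phase, Host roughly gains:
    //   - a field for greeting plus an accessor implementation returning it
    //   - a forwarder greet() that calls the static Greeter$class.greet(this)
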
@@ -736,7 +733,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
stats1
}
- /** Does this field require an intialized bit? */
+ /** Does this field require an initialized bit? */
def needsInitFlag(sym: Symbol) = {
val res = (settings.checkInit.value
&& sym.isGetter
@@ -746,7 +743,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
&& !sym.isOuterAccessor)
if (settings.debug.value) {
- log("needsInitFlag(" + sym.fullNameString + "): " + res)
+ log("needsInitFlag(" + sym.fullName + "): " + res)
log("\tsym.isGetter: " + sym.isGetter)
log("\t!isInitializedToDefault: " + !sym.isInitializedToDefault + sym.hasFlag(DEFAULTINIT) + sym.hasFlag(ACCESSOR) + sym.isTerm)
log("\t!sym.hasFlag(PARAMACCESSOR): " + !sym.hasFlag(PARAMACCESSOR))
@@ -820,7 +817,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def buildFieldPositions(clazz: Symbol) {
var fields = usedBits(clazz)
for (f <- clazz.info.decls.iterator if needsInitFlag(f) || f.hasFlag(LAZY)) {
- if (settings.debug.value) log(f.fullNameString + " -> " + fields)
+ if (settings.debug.value) log(f.fullName + " -> " + fields)
fieldOffset(f) = fields
fields += 1
}
@@ -994,7 +991,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
case Super(_, mix) =>
// change super calls to methods in implementation classes to static calls.
// Transform references super.m(args) as follows:
- // - if `m' refers to a trait, insert a static call to the correspondign static
+ // - if `m' refers to a trait, insert a static call to the corresponding static
// implementation
// - otherwise return tree unchanged
if (mix == nme.EMPTY.toTypeName && currentOwner.enclClass.isImplClass)
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index cdbea6fcfe..305f9218ec 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -15,7 +15,7 @@ import annotation.tailrec
/** A class that yields a kind of iterator (`Cursor`),
* which yields all pairs of overriding/overridden symbols
* that are visible in some baseclass, unless there's a parent class
- * that aleady contains the same pairs.
+ * that already contains the same pairs.
* @author Martin Odersky
* @version 1.0
*/
@@ -42,7 +42,7 @@ abstract class OverridingPairs {
*/
protected def parents: List[Type] = base.info.parents
- /** Does `sym1` match `sym2` so that it qualifies as overiding.
+ /** Does `sym1` match `sym2` so that it qualifies as overriding.
* Types always match. Term symbols match if their membertypes
* relative to <base>.this do
*/
@@ -99,8 +99,8 @@ abstract class OverridingPairs {
private val size = base.info.baseClasses.length
- /** A map from baseclasses of <base> to ints, with smaller ints meansing lower in
- * lineraizatuon order.
+ /** A map from baseclasses of <base> to ints, with smaller ints meaning lower in
+ * linearization order.
*/
private val index = new HashMap[Symbol, Int]
@@ -176,13 +176,13 @@ abstract class OverridingPairs {
/** The current entry candidate for overridden */
private var nextEntry = curEntry
- /** The current candidate symbol for overridding */
+ /** The current candidate symbol for overriding */
var overriding: Symbol = _
- /** If not null: The symbol overridden by overridding */
+ /** If not null: The symbol overridden by overriding */
var overridden: Symbol = _
- //@M: note that next is called once during object initialisation
+ //@M: note that next is called once during object initialization
def hasNext: Boolean = curEntry ne null
@tailrec
diff --git a/src/compiler/scala/tools/nsc/transform/Reifiers.scala b/src/compiler/scala/tools/nsc/transform/Reifiers.scala
index d4290bff59..eef260f5a3 100644
--- a/src/compiler/scala/tools/nsc/transform/Reifiers.scala
+++ b/src/compiler/scala/tools/nsc/transform/Reifiers.scala
@@ -18,11 +18,14 @@ trait Reifiers {
if (sym.isClass) reflect.Class(fullname)
else if (sym.isType) reflect.TypeField(fullname, reify(sym.info))
else if (sym.isMethod) reflect.Method(fullname, reify(sym.info))
+ else if (sym.isValueParameter) reflect.LocalValue(reflect.NoSymbol, fullname, reify(sym.info))
else reflect.Field(fullname, reify(sym.info));
def reify(sym: Symbol): reflect.Symbol = {
if (sym.isRoot || sym.isRootPackage || sym.isEmptyPackageClass || sym.isEmptyPackage)
reflect.RootSymbol
+ else if (sym.isValueParameter)
+ mkGlobalSymbol(sym.name.toString, sym)
else if (sym.owner.isTerm)
reflect.NoSymbol
else reify(sym.owner) match {
@@ -216,8 +219,6 @@ trait Reifiers {
if (rsym == reflect.NoSymbol) throw new TypeError("cannot reify symbol: " + tree.symbol)
else reflect.Select(reify(qual), reify(tree.symbol))
- case _ : StubTree => reflect.Literal(0)
-
case Literal(constant) =>
reflect.Literal(constant.value)
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 6937658534..f76ab66aa4 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -9,7 +9,6 @@ package transform
import scala.tools.nsc.symtab.Flags
import scala.tools.nsc.util.FreshNameCreator
-import scala.tools.nsc.util.Position
import scala.collection.{mutable, immutable}
@@ -43,7 +42,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
env
}
- /** Is this typeenv included in `other'? All type variables in this environement
+ /** Is this typeenv included in `other'? All type variables in this environment
* are defined in `other' and bound to the same type.
*/
def includes(t1: TypeEnv, t2: TypeEnv) = {
@@ -55,7 +54,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- /** Reduce the given environment to contain mappins only for type variables in tps. */
+ /** Reduce the given environment to contain mappings only for type variables in tps. */
def reduce(env: TypeEnv, tps: immutable.Set[Symbol]): TypeEnv = {
env filter { kv => tps.contains(kv._1)}
}
@@ -92,7 +91,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case class Overload(sym: Symbol, env: TypeEnv) {
override def toString: String =
- "specalized overload " + sym + " in " + env
+ "specialized overload " + sym + " in " + env
}
/** The annotation used to mark specialized type parameters. */
@@ -159,7 +158,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def hasSpecializedParams(clazz: Symbol): Boolean =
!specializedParams(clazz).isEmpty
- /** Return specialized type paramters. */
+ /** Return specialized type parameters. */
def specializedParams(sym: Symbol): List[Symbol] =
splitParams(sym.info.typeParams)._1
@@ -174,7 +173,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
override def apply(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) if !args.isEmpty =>
val pre1 = this(pre)
- val args1 = args map this
+ val args1 = args// map this
val unspecArgs = unspecializedArgs(sym, args)
specializedClass.get((sym, TypeEnv.fromSpecialization(sym, args1))) match {
case Some(sym1) =>
@@ -183,7 +182,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case None =>
typeRef(pre1, sym, args1)
}
- case _ => mapOver(tp)
+ case _ => tp // mapOver(tp)
}
}
@@ -199,8 +198,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val tvars = if (sym.isClass) env.keySet
else specializedTypeVars(sym.info).intersect(env.keySet)
val (methparams, others) = tvars.toList.partition(_.owner.isMethod)
- val tvars1 = methparams.sortWith(_.name.toString < _.name.toString)
- val tvars2 = others.sortWith(_.name.toString < _.name.toString)
+ val tvars1 = methparams sortBy (_.name.toString)
+ val tvars2 = others sortBy (_.name.toString)
log("specName(" + sym + ") env " + env)
specializedName(sym.name, tvars1 map env, tvars2 map env)
}
@@ -285,7 +284,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
Nil
}
- /** Return a list of all type environements for all specializations
+ /** Return a list of all type environments for all specializations
* of @specialized types in `tps'.
*/
private def specializations(tps: List[Symbol]): List[TypeEnv] = {
@@ -373,16 +372,24 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
typeEnv(cls) = env
this.specializedClass((clazz, env)) = cls
+ // declarations of the newly specialized class 'cls'
val decls1 = new Scope
+ // original unspecialized type parameters
+ var oldClassTParams: List[Symbol] = Nil
+
+ // unspecialized type parameters of 'cls' (cloned)
+ var newClassTParams: List[Symbol] = Nil
+
val specializedInfoType: Type = {
val (_, unspecParams) = splitParams(clazz.info.typeParams)
- val tparams1 = cloneSymbols(unspecParams, cls)
- var parents = List(subst(env, clazz.tpe).subst(unspecParams, tparams1 map (_.tpe)))
+ oldClassTParams = unspecParams
+ newClassTParams = cloneSymbols(unspecParams, cls)
+ var parents = List(subst(env, clazz.tpe).subst(unspecParams, newClassTParams map (_.tpe)))
if (parents.head.typeSymbol.isTrait)
parents = parents.head.parents.head :: parents
val infoType = ClassInfoType(parents, decls1, cls)
- if (tparams1.isEmpty) infoType else PolyType(tparams1, infoType)
+ if (newClassTParams.isEmpty) infoType else PolyType(newClassTParams, infoType)
}
atPhase(phase.next)(cls.setInfo(specializedInfoType))
@@ -396,11 +403,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
def enterMember(sym: Symbol): Symbol = {
typeEnv(sym) = fullEnv ++ typeEnv(sym) // append the full environment
- sym.setInfo(sym.info.substThis(clazz, ThisType(cls)))
+ sym.setInfo(sym.info.substThis(clazz, ThisType(cls)).subst(oldClassTParams, newClassTParams map (_.tpe)))
decls1.enter(subst(fullEnv)(sym))
}
- /** Create and enter in scope an overriden symbol m1 for `m' that forwards
+ /** Create and enter in scope an overridden symbol m1 for `m' that forwards
* to `om'. `om' is a fresh, special overload of m1 that is an implementation
* of `m'. For example, for a
*
@@ -423,7 +430,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
info(specMember) = Forward(om)
- info(om) = Implementation(original)
+ info(om) = if (original.isDeferred) Forward(original) else Implementation(original)
+ log("forwardToOverlad: " + " original.isDeferred: " + original.isDeferred + " info(om): " + info(om))
typeEnv(om) = env ++ typeEnv(m) // add the environment for any method tparams
enterMember(om)
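
A hedged sketch of the m1/om pair that comment describes (the mangled name below is made up; the real one comes from specializedName): a fresh overload om carries the specialized implementation and the overridden symbol m1 forwards to it.

    class Ops {
      def first[@specialized T](xs: Array[T]): T = xs(0)
    }
    // Conceptually, for the Int binding the pair looks like:
    //   def first[T](xs: Array[T]): T         = ...    // m1: forwards to the overload below
    //   def first$spcInt(xs: Array[Int]): Int = xs(0)  // om: fresh specialized overload holding the body
    // (first$spcInt is an illustrative name only.)
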
@@ -459,7 +467,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} else if (m.isDeferred) { // abstract methods
val specMember = enterMember(m.cloneSymbol(cls)).setFlag(SPECIALIZED).resetFlag(DEFERRED)
- log("deferred " + specMember.fullNameString + " is forwarded")
+ log("deferred " + specMember.fullName + " is forwarded")
info(specMember) = new Forward(specMember) {
override def target = m.owner.info.member(specializedName(m, env))
@@ -526,14 +534,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
typeEnv(specClass) = fullEnv
specClass.name = specializedName(specClass, fullEnv)
enterMember(specClass)
- log("entered specialized class with info " + specClass.fullNameString + ": " + specClass.info)
+ log("entered specialized class with info " + specClass.fullName + ": " + specClass.info)
info(specClass) = SpecializedInnerClass(m, fullEnv)
}
}
cls
}
- log("specializeClass " + clazz.fullNameString)
+ log("specializeClass " + clazz.fullName)
val decls1 = (clazz.info.decls.toList flatMap { m: Symbol =>
if (m.isAnonymousClass) List(m) else {
normalizeMember(m.owner, m, outerEnv) flatMap { normalizedMember =>
@@ -649,8 +657,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val opc = new overridingPairs.Cursor(clazz)
val oms = new mutable.ListBuffer[Symbol]
while (opc.hasNext) {
- log("\toverriding pairs: " + opc.overridden.fullNameString + ": " + opc.overridden.info
- + " overriden by " + opc.overriding.fullNameString + ": " + opc.overriding.info)
+ log("\toverriding pairs: " + opc.overridden.fullName + ": " + opc.overridden.info
+ + " overridden by " + opc.overriding.fullName + ": " + opc.overriding.info)
if (opc.overriding.owner == clazz && !specializedTypeVars(opc.overridden.info).isEmpty) {
log("\t\tspecializedTVars: " + specializedTypeVars(opc.overridden.info))
val env = unify(opc.overridden.info, opc.overriding.info, emptyEnv)
@@ -660,12 +668,22 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (!env.isEmpty
&& TypeEnv.isValid(env, opc.overridden)
&& opc.overridden.owner.info.decl(specializedName(opc.overridden, env)) != NoSymbol) {
- log("Added specialized overload for " + opc.overriding.fullNameString + " in env: " + env)
+ log("Added specialized overload for " + opc.overriding.fullName + " in env: " + env)
val om = specializedOverload(clazz, opc.overridden, env)
+ typeEnv(om) = env
if (!opc.overriding.isDeferred) {
concreteSpecMethods += opc.overriding
- info(om) = Implementation(opc.overriding)
+ // if the override is a normalized member, 'om' gets the implementation from
+ // its original target, and adds the environment of the normalized member (that is,
+ // any specialized /method/ type parameter bindings)
+ info(om) = info.get(opc.overriding) match {
+ case Some(NormalizedMember(target)) =>
+ typeEnv(om) = env ++ typeEnv(opc.overriding)
+ Implementation(target)
+ case None => Implementation(opc.overriding)
+ }
info(opc.overriding) = Forward(om)
+ log("typeEnv(om) = " + typeEnv(om))
}
overloads(opc.overriding) = Overload(om, env) :: overloads(opc.overriding)
oms += om
@@ -724,7 +742,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
override def default(key: Symbol) = emptyEnv
}
- /** Apply type bindings in the given environement `env' to all declarations. */
+ /** Apply type bindings in the given environment `env' to all declarations. */
private def subst(env: TypeEnv, decls: List[Symbol]): List[Symbol] =
decls map subst(env)
@@ -782,8 +800,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
PolyType(targs, ClassInfoType(parents, new Scope(specializeClass(clazz, typeEnv(clazz))), clazz))
case ClassInfoType(base, decls, clazz) if !clazz.isPackageClass =>
-// val parents = base map specializedType
- log("transformInfo " + clazz )
+ val parents = base map specializedType
+ log("transformInfo " + clazz + " with parents1: " + parents)
val res = ClassInfoType(base map specializedType, new Scope(specializeClass(clazz, typeEnv(clazz))), clazz)
res
@@ -806,13 +824,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
def conflicting(env: TypeEnv, warn: (Position, String) => Unit): Boolean =
env exists { case (tvar, tpe) =>
- if (!(subst(env, tvar.info.bounds.lo) <:< tpe) && (tpe <:< subst(env, tvar.info.bounds.hi))) {
+ if (!((subst(env, tvar.info.bounds.lo) <:< tpe)
+ && (tpe <:< subst(env, tvar.info.bounds.hi)))) {
warn(tvar.pos, "Bounds prevent specialization for " + tvar)
true
} else false
}
- /** The type environemnt is sound w.r.t. to all type bounds or only soft
+ /** The type environment is sound w.r.t. to all type bounds or only soft
* conflicts appear. An environment is sound if all bindings are within
* the bounds of the given type variable. A soft conflict is a binding
* that does not fall within the bounds, but whose bounds contain
@@ -911,7 +930,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
Apply(
Select(New(TypeTree(findSpec(tpt.tpe))), nme.CONSTRUCTOR),
transformTrees(args))))
- } else tree
+ } else super.transform(tree)
case TypeApply(Select(qual, name), targs) if (!specializedTypeVars(symbol.info).isEmpty && name != nme.CONSTRUCTOR) =>
log("checking typeapp for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe)
@@ -932,8 +951,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case None => super.transform(tree)
}
- case Select(qual, name) if (/*!symbol.isMethod
- &&*/ !specializedTypeVars(symbol.info).isEmpty
+ case Select(qual, name) if (!specializedTypeVars(symbol.info).isEmpty
&& name != nme.CONSTRUCTOR) =>
val qual1 = transform(qual)
log("checking for unification at " + tree + " with sym.tpe: " + symbol.tpe + " and tree.tpe: " + tree.tpe + " at " + tree.pos.line)
@@ -942,7 +960,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (!env.isEmpty) {
val specMember = overload(symbol, env)
if (specMember.isDefined) {
- log("** routing " + tree + " to " + specMember.get.sym.fullNameString + " tree: " + Select(qual1, specMember.get.sym.name))
+ log("** routing " + tree + " to " + specMember.get.sym.fullName + " tree: " + Select(qual1, specMember.get.sym.name))
localTyper.typedOperator(atPos(tree.pos)(Select(qual1, specMember.get.sym.name)))
} else {
val specMember = qual1.tpe.member(specializedName(symbol, env))
@@ -956,7 +974,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
super.transform(tree)
case PackageDef(pid, stats) =>
- tree.symbol.info // make sure specializations have been peformed
+ tree.symbol.info // make sure specializations have been performed
log("PackageDef owner: " + symbol)
atOwner(tree, symbol) {
val specMembers = implSpecClasses(stats) map localTyper.typed
@@ -967,7 +985,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val specMembers = makeSpecializedMembers(tree.symbol.enclClass) ::: (implSpecClasses(body) map localTyper.typed)
if (!symbol.isPackageClass)
(new CollectMethodBodies)(tree)
- treeCopy.Template(tree, parents, self, atOwner(currentOwner)(transformTrees(body ::: specMembers)))
+ treeCopy.Template(tree, currentOwner.info.parents.map(TypeTree), self,
+ atOwner(currentOwner)(transformTrees(body ::: specMembers)))
case ddef @ DefDef(mods, name, tparams, vparamss, tpt, rhs) if info.isDefinedAt(symbol) =>
if (symbol.isConstructor) {
@@ -981,7 +1000,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
} else info(symbol) match {
case Implementation(target) =>
- assert(body.isDefinedAt(target), "sym: " + symbol.fullNameString + " target: " + target.fullNameString)
+ assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
// we have an rhs, specialize it
val tree1 = duplicateBody(ddef, target)
log("implementation: " + tree1)
@@ -991,7 +1010,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case NormalizedMember(target) =>
log("normalized member " + symbol + " of " + target)
- if (conflicting(typeEnv(symbol))) {
+ if (target.isDeferred || conflicting(typeEnv(symbol))) {
+/*
val targs = makeTypeArguments(symbol, target)
log("targs: " + targs)
val call =
@@ -1006,10 +1026,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
maybeCastTo(symbol.info.finalResultType,
target.info.subst(target.info.typeParams, targs).finalResultType,
call)))
+*/
-/* copy.DefDef(tree, mods, name, tparams, vparamss, tpt,
- typed(Apply(gen.mkAttributedRef(definitions.Predef_error),
- List(Literal("boom! you stepped on a bug. This method should never be called.")))))*/
+ treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt,
+ localTyper.typed(
+ Apply(gen.mkAttributedRef(definitions.Predef_error),
+ List(Literal("boom! you stepped on a bug. This method should never be called.")))))
} else {
// we have an rhs, specialize it
val tree1 = duplicateBody(ddef, target)
@@ -1019,7 +1041,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
case SpecialOverload(original, env) =>
- log("completing specialized " + symbol.fullNameString + " calling " + original)
+ log("completing specialized " + symbol.fullName + " calling " + original)
val t = DefDef(symbol, { vparamss =>
val fun = Apply(Select(This(symbol.owner), original),
makeArguments(original, vparamss.head))
@@ -1047,7 +1069,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case ValDef(mods, name, tpt, rhs) if symbol.hasFlag(SPECIALIZED) =>
assert(body.isDefinedAt(symbol.alias))
val tree1 = treeCopy.ValDef(tree, mods, name, tpt, body(symbol.alias).duplicate)
- log("now typing: " + tree1 + " in " + tree.symbol.owner.fullNameString)
+ log("now typing: " + tree1 + " in " + tree.symbol.owner.fullName)
val d = new Duplicator
d.retyped(localTyper.context1.asInstanceOf[d.Context],
tree1,
@@ -1055,6 +1077,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
symbol.enclClass,
typeEnv(symbol.alias) ++ typeEnv(tree.symbol))
+ case Apply(Select(Super(qual, name), name1), args) =>
+ val res = localTyper.typed(Apply(Select(Super(qual, name), name1), args))
+ log("retyping call to super, from: " + symbol + " to " + res.symbol)
+ res
+
case _ =>
super.transform(tree)
}
@@ -1068,7 +1095,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private def duplicateBody(tree: DefDef, target: Symbol): Tree = {
val symbol = tree.symbol
- log("specializing body of" + symbol.fullNameString + ": " + symbol.info)
+ log("specializing body of" + symbol.fullName + ": " + symbol.info)
val DefDef(mods, name, tparams, vparamss, tpt, _) = tree
val (_, origtparams) = splitParams(target.typeParams)
log("substituting " + origtparams + " for " + symbol.typeParams)
@@ -1081,7 +1108,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
ValDef(param.cloneSymbol(symbol).setInfo(param.info.substSym(oldtparams, newtparams)))
})
- // replace value and type paremeters of the old method with the new ones
+ // replace value and type parameters of the old method with the new ones
val symSubstituter = new ImplementationAdapter(
parameters(target).flatten ::: origtparams,
vparamss1.flatten.map(_.symbol) ::: newtparams)
@@ -1091,7 +1118,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val meth = treeCopy.DefDef(tree, mods, name, tparams, vparamss1, tpt, tmp)
- log("now typing: " + meth + " in " + symbol.owner.fullNameString)
+ log("now typing: " + meth + " in " + symbol.owner.fullName)
val d = new Duplicator
d.retyped(localTyper.context1.asInstanceOf[d.Context],
meth,
@@ -1153,10 +1180,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
override def transform(tree: Tree): Tree = {
val tree1 = super.transform(tree)
if (needsCast(tree1)) {
- log("inserting cast for " + tree1 + " tpe: " + tree1.tpe)
- val tree2 = gen.mkAsInstanceOf(tree1, tree1.tpe.typeSymbol.info.bounds.hi)
- log(" casted to: " + tree2)
- tree2
+// log("inserting cast for " + tree1 + " tpe: " + tree1.tpe)
+// val tree2 = gen.mkAsInstanceOf(tree1, tree1.tpe.typeSymbol.info.bounds.hi)
+// log(" casted to: " + tree2)
+ tree1
} else
tree1
}
@@ -1180,7 +1207,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if m.hasFlag(SPECIALIZED)
&& (m.sourceFile ne null)
&& satisfiable(typeEnv(m), warn(cls))) {
- log("creating tree for " + m.fullNameString)
+ log("creating tree for " + m.fullName)
if (m.isMethod) {
if (m.isClassConstructor) {
val origParamss = parameters(info(m).target)
@@ -1194,7 +1221,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// param accessors for private members (the others are inherited from the generic class)
for (param <- vparams if cls.info.nonPrivateMember(param.name) == NoSymbol;
val acc = param.cloneSymbol(cls).setFlag(PARAMACCESSOR | PRIVATE)) {
- log("param accessor for " + acc.fullNameString)
+ log("param accessor for " + acc.fullName)
cls.info.decls.enter(acc)
mbrs += ValDef(acc, EmptyTree).setType(NoType).setPos(m.pos)
}
@@ -1208,7 +1235,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// mbrs +=
// ClassDef(m, Template(m.info.parents map TypeTree, emptyValDef, List())
// .setSymbol(m.newLocalDummy(m.pos)))
-// log("created synthetic class: " + m.fullNameString)
+// log("created synthetic class: " + m.fullName)
}
}
mbrs.toList
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index 1575ec1384..adeab550ee 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -72,7 +72,7 @@ abstract class TailCalls extends Transform
* are optimized. Since 'this' is not a local variable, a dummy local val
* is added and used as a label parameter. The backend knows to load
* the corresponding argument in the 'this' (local at index 0). This dummy local
- * is never used and should be cleand up by dead code elmination (when enabled).
+ * is never used and should be cleaned up by dead code elimination (when enabled).
* </p>
* <p>
* This phase has been moved before pattern matching to catch more
@@ -162,7 +162,7 @@ abstract class TailCalls extends Transform
newCtx.label.setInfo(MethodType(currentClassParam :: tree.symbol.tpe.params, tree.symbol.tpe.finalResultType))
newCtx.tailPos = true
- val isEligible = newCtx.currentMethod.isFinal || (newCtx.currentMethod.enclClass hasFlag Flags.MODULE)
+ val isEligible = newCtx.currentMethod.isEffectivelyFinal || (newCtx.currentMethod.enclClass hasFlag Flags.MODULE)
// If -Ytailrecommend is given, we speculatively try transforming ineligible methods and
// report where we would have been successful.
val recommend = settings.Ytailrec.value
@@ -270,7 +270,7 @@ abstract class TailCalls extends Transform
case Apply(tapply @ TypeApply(fun, targs), vargs) =>
lazy val defaultTree = treeCopy.Apply(tree, tapply, transformTrees(vargs, mkContext(ctx, false)))
- if ( ctx.currentMethod.isFinal &&
+ if ( ctx.currentMethod.isEffectivelyFinal &&
ctx.tailPos &&
isSameTypes(ctx.tparams, targs map (_.tpe.typeSymbol)) &&
isRecursiveCall(fun)) {
@@ -299,7 +299,7 @@ abstract class TailCalls extends Transform
case Apply(fun, args) =>
lazy val defaultTree = treeCopy.Apply(tree, fun, transformTrees(args, mkContext(ctx, false)))
- if (ctx.currentMethod.isFinal &&
+ if (ctx.currentMethod.isEffectivelyFinal &&
ctx.tailPos &&
isRecursiveCall(fun)) {
fun match {
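
The switch from isFinal to isEffectivelyFinal above widens tail-call elimination to methods that cannot be overridden even without an explicit final modifier, e.g. private methods or members of final classes. A small sketch of the user-visible rule (illustrative code, not from this patch):

    import scala.annotation.tailrec

    object TailDemo {
      // A method on an object cannot be overridden, so the recursive call is
      // eligible for rewriting into a jump by the tailcalls phase.
      @tailrec
      def gcd(a: Int, b: Int): Int =
        if (b == 0) a else gcd(b, a % b)

      def main(args: Array[String]): Unit =
        println(gcd(1071, 462))   // 21
    }
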
diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
index 4519f1a486..65f3caf642 100644
--- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
+++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
@@ -7,7 +7,6 @@
package scala.tools.nsc
package transform
-import util.Position
import scala.collection.mutable.{Map, HashMap}
/** A base class for transforms.
@@ -31,7 +30,7 @@ trait TypingTransformers {
def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A = {
val savedLocalTyper = localTyper
-// println("ttransformer atOwner: " + owner + " isPackage? " + owner.isPackage)
+// println("transformer atOwner: " + owner + " isPackage? " + owner.isPackage)
localTyper = localTyper.atOwner(tree, if (owner.isModule) owner.moduleClass else owner)
typers += Pair(owner, localTyper)
val result = super.atOwner(owner)(trans)
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index dc475b4173..e339560837 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -9,7 +9,6 @@ package transform
import symtab.Flags._
import scala.collection.mutable.{HashMap, HashSet}
-import scala.tools.nsc.util.Position
/*<export>*/
/** - uncurry all symbol and tree types (@see UnCurryPhase)
@@ -62,10 +61,8 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
case MethodType(params, MethodType(params1, restpe)) =>
apply(MethodType(params ::: params1, restpe))
case MethodType(params, ExistentialType(tparams, restpe @ MethodType(_, _))) =>
- assert(false, "unexpected curried method types with intervening exitential")
+ assert(false, "unexpected curried method types with intervening existential")
tp0
- case mt: ImplicitMethodType =>
- apply(MethodType(mt.params, mt.resultType))
case PolyType(List(), restpe) => // nullary method type
apply(MethodType(List(), restpe))
case PolyType(tparams, restpe) => // polymorphic nullary method type, since it didn't occur in a higher-kinded position
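
The first uncurry case above flattens nested MethodTypes, so a curried signature ends up with a single parameter list after this phase. Roughly, at the source level (illustrative only):

    object CurryDemo {
      def add(x: Int)(y: Int): Int = x + y       // MethodType(x, MethodType(y, Int))

      // After uncurry the symbol's type is conceptually the flattened form:
      def addFlat(x: Int, y: Int): Int = x + y   // MethodType(x ::: y, Int)

      def main(args: Array[String]): Unit =
        println(add(1)(2) + addFlat(1, 2))       // 6
    }
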
@@ -175,7 +172,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
throw ex
}
- /* Is tree a reference `x' to a call by name parameter that neeeds to be converted to
+ /* Is tree a reference `x' to a call by name parameter that needs to be converted to
* x.apply()? Note that this is not the case if `x' is used as an argument to another
* call by name parameter.
*/
@@ -199,7 +196,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
/** The type of a non-local return expression with given argument type */
private def nonLocalReturnExceptionType(argtype: Type) =
- appliedType(NonLocalReturnExceptionClass.typeConstructor, List(argtype))
+ appliedType(NonLocalReturnControlClass.typeConstructor, List(argtype))
/** A hashmap from method symbols to non-local return keys */
private val nonLocalReturnKeys = new HashMap[Symbol, Symbol]
@@ -217,9 +214,9 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
/** Generate a non-local return throw with given return expression from given method.
* I.e. for the method's non-local return key, generate:
*
- * throw new NonLocalReturnException(key, expr)
+ * throw new NonLocalReturnControl(key, expr)
* todo: maybe clone a pre-existing exception instead?
- * (but what to do about excaptions that miss their targets?)
+ * (but what to do about exceptions that miss their targets?)
*/
private def nonLocalReturnThrow(expr: Tree, meth: Symbol) =
localTyper.typed {
@@ -236,7 +233,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
* try {
* body
* } catch {
- * case ex: NonLocalReturnException[_] =>
+ * case ex: NonLocalReturnControl[_] =>
* if (ex.key().eq(key)) ex.value()
* else throw ex
* }
@@ -248,7 +245,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
val ex = meth.newValue(body.pos, nme.ex) setInfo extpe
val pat = Bind(ex,
Typed(Ident(nme.WILDCARD),
- AppliedTypeTree(Ident(NonLocalReturnExceptionClass),
+ AppliedTypeTree(Ident(NonLocalReturnControlClass),
List(Bind(nme.WILDCARD.toTypeName,
EmptyTree)))))
val rhs =
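
The hunks above only rename the carrier exception (NonLocalReturnException becomes NonLocalReturnControl); the mechanism they document is unchanged. A user-level example of code that exercises it (illustrative, not from this patch):

    object NonLocalDemo {
      // 'return' inside the closure passed to foreach is non-local: UnCurry
      // keys a control-flow exception to this invocation of indexOf, throws it
      // at the return site and catches it around indexOf's body.
      def indexOf(xs: List[Int], x: Int): Int = {
        var i = 0
        xs.foreach { y =>
          if (y == x) return i
          i += 1
        }
        -1
      }

      def main(args: Array[String]): Unit =
        println(indexOf(List(5, 6, 7), 6))   // 1
    }
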
@@ -478,6 +475,16 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
}
}
+ /** For removing calls to specially designated methods.
+ */
+ def elideIntoUnit(tree: Tree): Tree = Literal(()) setPos tree.pos setType UnitClass.tpe
+ def isElidable(tree: Tree) = {
+ val sym = tree.symbol
+ // XXX settings.noassertions.value temporarily retained to avoid
+ // breakage until a reasonable interface is settled upon.
+ sym != null && sym.elisionLevel.exists(x => x < settings.elidebelow.value || settings.noassertions.value)
+ }
+
// ------ The tree transformers --------------------------------------------------------
def mainTransform(tree: Tree): Tree = {
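
The new isElidable/elideIntoUnit pair above replaces whole calls with the unit literal when the callee's elision level falls below the configured threshold. At the user level this corresponds to the @elidable annotation together with the -Xelide-below flag; a small sketch (illustrative):

    import scala.annotation.elidable
    import scala.annotation.elidable.INFO

    object ElideDemo {
      // Compiled with e.g. -Xelide-below 1000, the call site below is replaced
      // by () and the argument expression is never evaluated.
      @elidable(INFO)
      def trace(msg: String): Unit = println("trace: " + msg)

      def main(args: Array[String]): Unit = {
        trace("starting up")
        println("done")
      }
    }
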
@@ -582,21 +589,15 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
treeCopy.UnApply(tree, fn1, args1)
case Apply(fn, args) =>
- // XXX settings.noassertions.value temporarily retained to avoid
- // breakage until a reasonable interface is settled upon.
- def elideFunctionCall(sym: Symbol) =
- sym != null && sym.elisionLevel.exists(x => x < settings.elideLevel.value || settings.noassertions.value)
-
- if (elideFunctionCall(fn.symbol)) {
- Literal(()).setPos(tree.pos).setType(UnitClass.tpe)
- } else if (fn.symbol == Object_synchronized && shouldBeLiftedAnyway(args.head)) {
+ if (isElidable(fn))
+ elideIntoUnit(tree)
+ else if (fn.symbol == Object_synchronized && shouldBeLiftedAnyway(args.head))
transform(treeCopy.Apply(tree, fn, List(liftTree(args.head))))
- } else {
+ else
withNeedLift(true) {
val formals = fn.tpe.paramTypes
treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals)))
}
- }
case Assign(Select(_, _), _) =>
withNeedLift(true) { super.transform(tree) }
@@ -635,16 +636,21 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
} setType uncurryTreeType(tree.tpe)
def postTransform(tree: Tree): Tree = atPhase(phase.next) {
- def applyUnary(): Tree =
- if (tree.symbol.isMethod &&
- (!tree.tpe.isInstanceOf[PolyType] || tree.tpe.typeParams.isEmpty)) {
- if (!tree.tpe.isInstanceOf[MethodType]) tree.tpe = MethodType(List(), tree.tpe);
- atPos(tree.pos)(Apply(tree, List()) setType tree.tpe.resultType)
- } else if (tree.isType) {
- TypeTree(tree.tpe) setPos tree.pos
- } else {
- tree
+ def applyUnary(): Tree = {
+ def needsParens = tree.symbol.isMethod && (!tree.tpe.isInstanceOf[PolyType] || tree.tpe.typeParams.isEmpty)
+ def repair = {
+ if (!tree.tpe.isInstanceOf[MethodType])
+ tree.tpe = MethodType(Nil, tree.tpe)
+
+ atPos(tree.pos)(Apply(tree, Nil) setType tree.tpe.resultType)
}
+
+ if (isElidable(tree)) elideIntoUnit(tree) // was not seen in mainTransform
+ else if (needsParens) repair
+ else if (tree.isType) TypeTree(tree.tpe) setPos tree.pos
+ else tree
+ }
+
tree match {
case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
val rhs1 = nonLocalReturnKeys.get(tree.symbol) match {
diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
index 0317a3302c..f1e88fe6e8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
@@ -42,7 +42,7 @@ abstract class ConstantFolder {
private def fold(tree: Tree, compX: => Constant): Tree =
try {
val x = compX
- if ((x ne null) && x.tag != UnitTag) tree setType mkConstantType(x)
+ if ((x ne null) && x.tag != UnitTag) tree setType ConstantType(x)
else tree
} catch {
case _: ArithmeticException => tree // the code will crash at runtime,
@@ -154,7 +154,7 @@ abstract class ConstantFolder {
private def foldBinop(op: Name, x: Constant, y: Constant): Constant = {
val optag =
if (x.tag == y.tag) x.tag
- else if (isNumeric(x.tag) && isNumeric(y.tag)) math.max(x.tag, y.tag)
+ else if (x.isNumeric && y.isNumeric) math.max(x.tag, y.tag)
else NoTag
try optag match {
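
foldBinop above picks the wider of the two numeric tags for the folded result. A user-level view of the effect (illustrative):

    object FoldDemo {
      // Both operands are literals; Int is folded up to the wider Long tag,
      // so the right-hand side becomes a single Long constant at compile time.
      final val three = 1 + 2L

      def main(args: Array[String]): Unit =
        println(three)   // 3
    }
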
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index dd592bb96d..becb4b069f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -8,7 +8,6 @@ package scala.tools.nsc
package typechecker
import symtab.Flags._
-import scala.tools.nsc.util.{Position,NoPosition}
import scala.collection.mutable.ListBuffer
/** This trait ...
@@ -122,40 +121,6 @@ trait Contexts { self: Analyzer =>
var savedTypeBounds: List[(Symbol, Type)] = List() // saved type bounds
// for type parameters which are narrowed in a GADT
- def intern0 : Context = {
- if (this eq NoContext) return this
- val txt = new Context
- txt.unit = unit
- txt.tree = tree
- txt.owner = owner
- txt.scope = scope
- assert(outer ne this) // stupid
- txt.outer = outer // already interned
- def fix(what : Context) =
- if (what eq this) txt
- else what
- txt.enclClass = fix(enclClass)
- txt.enclMethod = fix(enclMethod)
- txt.implicitsEnabled = implicitsEnabled
- txt.variance = variance
- txt._undetparams = _undetparams
- txt.depth = depth
- txt.imports = imports
- txt.openImplicits = openImplicits
- txt.prefix = prefix
- txt.inConstructorSuffix = inConstructorSuffix
- txt.returnsSeen = returnsSeen
- txt.reportGeneralErrors = reportGeneralErrors
- txt.checking = checking
- txt.retyping = retyping
- txt.savedTypeBounds = savedTypeBounds
- txt
- }
- override def equals(that: Any): Boolean = that match {
- case that: AnyRef if this eq that => true
- case that => super.equals(that)
- }
-
def undetparams = _undetparams
def undetparams_=(ps: List[Symbol]) = {
//System.out.println("undetparams = " + ps);//debug
@@ -184,11 +149,7 @@ trait Contexts { self: Analyzer =>
c.owner = owner
c.scope = scope
- c.outer = intern(this)
- def internIf(txt : Context) = {
- if (txt eq this) c.outer // already interned!
- else txt
- }
+ c.outer = this
tree match {
case Template(_, _, _) | PackageDef(_, _) =>
@@ -196,7 +157,7 @@ trait Contexts { self: Analyzer =>
c.prefix = c.owner.thisType
c.inConstructorSuffix = false
case _ =>
- c.enclClass = internIf(this.enclClass)
+ c.enclClass = this.enclClass
c.prefix =
if (c.owner != this.owner && c.owner.isTerm) NoPrefix
else this.prefix
@@ -206,7 +167,7 @@ trait Contexts { self: Analyzer =>
case DefDef(_, _, _, _, _, _) =>
c.enclMethod = c
case _ =>
- c.enclMethod = internIf(this.enclMethod)
+ c.enclMethod = this.enclMethod
}
c.variance = this.variance
c.depth = if (scope == this.scope) this.depth else this.depth + 1
@@ -291,18 +252,11 @@ trait Contexts { self: Analyzer =>
argContext
}
- //todo: remove
- def makeConstructorSuffixContext = {
- val c = make(tree)
- c.inConstructorSuffix = true
- c
- }
-
private def diagString =
if (diagnostic.isEmpty) ""
else diagnostic.mkString("\n","\n", "")
- def error(pos: Position, err: Error) {
+ def error(pos: Position, err: Throwable) {
val msg = err.getMessage() + diagString
if (reportGeneralErrors)
unit.error(pos, if (checking) "**** ERROR DURING INTERNAL CHECKING ****\n" + msg else msg)
@@ -371,7 +325,7 @@ trait Contexts { self: Analyzer =>
var c = this.enclClass
while (c != NoContext &&
!clazz.isNonBottomSubClass(c.owner) &&
- !(c.owner.isModuleClass && clazz.isNonBottomSubClass(c.owner.linkedClassOfModule)))
+ !(c.owner.isModuleClass && clazz.isNonBottomSubClass(c.owner.companionClass)))
c = c.outer.enclClass
c
}
@@ -530,30 +484,6 @@ trait Contexts { self: Analyzer =>
}
implicitsCache
}
- override def hashCode = {
- var hc = 0
- implicit def b2i(b : Boolean) = if (b) 1 else 0
- // assum enclClass/enclMethod/outer are all interned already.
- hc += tree.hashCodeStructure
- def f(txt : Context) = if (txt eq this) 0 else System.identityHashCode(txt)
- hc += f(enclClass)
- hc += f(enclMethod)
- hc += f(outer)
- hc += owner.hashCode
- hc += scope.hashCode
- hc += variance.hashCode
- hc += _undetparams.hashCode
- hc += depth
- hc += imports.hashCode
- hc += prefix.hashCode
- hc += inConstructorSuffix
- hc += checking
- hc += retyping
- hc += savedTypeBounds.hashCode
- hc += (if (unit eq null) 0 else unit.hashCode)
- hc
- }
-
}
class ImportInfo(val tree: Import, val depth: Int) {
/** The prefix expression */
@@ -603,23 +533,9 @@ trait Contexts { self: Analyzer =>
}
override def toString() = tree.toString()
-
- override def hashCode = tree.hashCodeStructure + depth
- override def equals(that : Any) = that match {
- case that : ImportInfo =>
- depth == that.depth && (tree equalsStructure that.tree)
- case _ => false
- }
}
case class ImportType(expr: Tree) extends Type {
- override def equals(that : Any) = that match {
- case ImportType(expr) => this.expr == expr
- case _ => false
- }
- override def hashCode = expr.hashCode
override def safeToString = "ImportType("+expr+")"
}
- protected def intern(txt : Context) = txt
-
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/DeVirtualize.scala b/src/compiler/scala/tools/nsc/typechecker/DeVirtualize.scala
index aead82283d..a2e9f10a5a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/DeVirtualize.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/DeVirtualize.scala
@@ -137,7 +137,7 @@ abstract class DeVirtualize extends InfoTransform with TypingTransformers {
protected def factoryName(clazz: Symbol) =
atPhase(ownPhase) { newTermName("new$"+clazz.name) }
- /** Does `clazz' contaion virtual classes? */
+ /** Does `clazz' contain virtual classes? */
protected def containsVirtuals(clazz: Symbol) = clazz.info.decls.toList exists (_.isVirtualClass)
/** The inner classes that need factory methods in `clazz'
@@ -177,7 +177,7 @@ abstract class DeVirtualize extends InfoTransform with TypingTransformers {
/** The name of the field representing a constructor parameter of a virtual class */
protected def paramFieldName(clazz: Symbol, index: Int) = atPhase(ownPhase) {
- clazz.expandedName(newTermName("param$"+index))
+ nme.expandedName(newTermName("param$"+index), clazz)
}
/** The name of the field representing a constructor parameter of a virtual class */
@@ -252,7 +252,7 @@ abstract class DeVirtualize extends InfoTransform with TypingTransformers {
val parents2 = addOverriddenVirtuals(clazz) map {
c => typeRef(clazz.owner.thisType, c, typeParams map (_.tpe))
}
- mkTypeBounds(NothingClass.tpe, intersectionType(parents1 ::: parents2))
+ TypeBounds(NothingClass.tpe, intersectionType(parents1 ::: parents2))
}
}
}
@@ -297,7 +297,7 @@ abstract class DeVirtualize extends InfoTransform with TypingTransformers {
private def copyType(tpe: Type): Type = tpe match {
case MethodType(formals, restpe) => MethodType(formals, copyType(restpe))
case PolyType(List(), restpe) => PolyType(List(), copyType(restpe))
- case PolyType(_, _) => throw new Error("bad case: "+tpe)
+ case PolyType(_, _) => abort("bad case: "+tpe)
case _ => owner.thisType.memberType(abstractType(clazz))
}
def getInfo = copyType(clazz.primaryConstructor.tpe)
@@ -305,9 +305,9 @@ abstract class DeVirtualize extends InfoTransform with TypingTransformers {
factory
}
- def removeDuplicates(ts: List[Type]): List[Type] = ts match {
+ def distinct(ts: List[Type]): List[Type] = ts match {
case List() => List()
- case t :: ts1 => t :: removeDuplicates(ts1 filter (_.typeSymbol != t.typeSymbol))
+ case t :: ts1 => t :: distinct(ts1 filter (_.typeSymbol != t.typeSymbol))
}
/** The concrete class symbol VC$fix in the factory symbol (@see mkFactory)
@@ -333,7 +333,7 @@ abstract class DeVirtualize extends InfoTransform with TypingTransformers {
}
atPhase(ownPhase.next) {
val parents2 =
- removeDuplicates(parents1.flatMap(addOverriddenVirtuals))
+ distinct(parents1.flatMap(addOverriddenVirtuals))
.map(_.substSym(clazz.typeParams, factory.typeParams))
sym setInfo ClassInfoType(parents2, new Scope, cclazz)
}
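
The rename of removeDuplicates to distinct above follows the collections method of the same name (the private helper here deduplicates by type symbol rather than by equality). On an ordinary list the library method behaves as follows (illustrative):

    object DistinctDemo {
      def main(args: Array[String]): Unit =
        // keeps the first occurrence of each element, preserving order
        println(List(1, 2, 1, 3, 2).distinct)   // List(1, 2, 3)
    }
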
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index 796d6f8134..7ca2ff81bb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -97,7 +97,7 @@ abstract class Duplicators extends Analyzer {
val tpe2: Type = (new FixInvalidSyms)(tpe1)
val tpe3 = tpe2 match {
case TypeRef(_, sym, _) if (sym.owner == oldClassOwner) =>
- log("seeing " + sym.fullNameString + " from a different angle")
+ log("seeing " + sym.fullName + " from a different angle")
tpe2.asSeenFrom(newClassOwner.thisType, oldClassOwner)
case _ => tpe2
}
@@ -157,7 +157,7 @@ abstract class Duplicators extends Analyzer {
typed(ddef)
}
- /** Special typer method allowing for re-type checking trees. It expects a typed tree.
+ /** Special typer method for re-type checking trees. It expects a typed tree.
* Returns a typed tree that has fresh symbols for all definitions in the original tree.
*
* Each definition tree is visited and its symbol added to the invalidSyms map (except LabelDefs),
@@ -240,9 +240,28 @@ abstract class Duplicators extends Analyzer {
log("changed " + tree + " to " + tree1)
super.typed(atPos(tree.pos)(tree1))
+ case Match(scrut, cases) =>
+ val scrut1 = typed(scrut, EXPRmode | BYVALmode, WildcardType)
+ val scrutTpe = scrut1.tpe.widen
+ val cases1 = if (scrutTpe.isFinalType) cases filter {
+ case CaseDef(Bind(_, pat @ Typed(_, tpt)), EmptyTree, body) =>
+ // the typed pattern is not incompatible with the scrutinee type
+ scrutTpe.matchesPattern(fixType(tpt.tpe))
+ case CaseDef(Typed(_, tpt), EmptyTree, body) =>
+ // the typed pattern is not incompatible with the scrutinee type
+ scrutTpe.matchesPattern(fixType(tpt.tpe))
+ case _ => true
+ } else cases
+
+ super.typed(atPos(tree.pos)(Match(scrut, cases1)), mode, pt)
+
+ case EmptyTree =>
+ // no need to do anything, in particular, don't set the type to null, EmptyTree.tpe_= asserts
+ tree
+
case _ =>
if (tree.hasSymbol && tree.symbol != NoSymbol && (tree.symbol.owner == definitions.AnyClass)) {
- tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any
+ tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==)
}
tree.tpe = null
super.typed(tree, mode, pt)
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index 586c21a31d..5b79662014 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -26,7 +26,7 @@ trait EtaExpansion { self: Analyzer =>
}
def unapply(tree: Tree): Option[(List[ValDef], Tree, List[Tree])] = tree match {
- case Function(vparams, Apply(fn, args)) if (vparams, args).zipped forall isMatch =>
+ case Function(vparams, Apply(fn, args)) if (vparams corresponds args)(isMatch) => // @PP: corresponds
Some((vparams, fn, args))
case _ =>
None
@@ -60,7 +60,7 @@ trait EtaExpansion { self: Analyzer =>
// Martin to Sean: I removed the
// else if (n == 0) branch and changed `n' in the line above to `(cnt - 1)'
// this was necessary because otherwise curried eta-expansions would get the same
- // symbol. An example which failes test/files/run/Course-2002-02.scala
+ // symbol. An example which fails: test/files/run/Course-2002-02.scala
// todo: review and get rid of the `n' argument (which is unused right now).
}
// { cnt = cnt + 1; newTermName("eta$" + cnt) }
@@ -114,9 +114,7 @@ trait EtaExpansion { self: Analyzer =>
* @return ...
*/
def expand(tree: Tree, tpe: Type): Tree = tpe match {
- case mt: ImplicitMethodType =>
- tree
- case MethodType(paramSyms, restpe) =>
+ case mt @ MethodType(paramSyms, restpe) if !mt.isImplicit =>
val params = paramSyms map (sym =>
ValDef(Modifiers(SYNTHETIC | PARAM),
sym.name, TypeTree(sym.tpe) , EmptyTree))
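
The guard above now uses corresponds, which, unlike the old (vparams, args).zipped forall, also requires the two sequences to have the same length. A standalone illustration of that difference (not from this patch):

    object CorrespondsDemo {
      def main(args: Array[String]): Unit = {
        val xs = List(1, 2, 3)
        println((xs corresponds List(1, 2, 3))(_ == _))   // true
        println((xs corresponds List(1, 2))(_ == _))      // false: lengths differ
        println((xs, List(1, 2)).zipped.forall(_ == _))   // true: zipped silently truncates
      }
    }
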
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 0539b4ee17..4c07e50da9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -12,7 +12,7 @@ package scala.tools.nsc
package typechecker
import scala.collection.mutable.{LinkedHashMap, ListBuffer}
-import scala.tools.nsc.util.{ HashSet, Position, Set, NoPosition, SourceFile }
+import scala.tools.nsc.util.{HashSet, Set, SourceFile}
import symtab.Flags._
import util.Statistics._
@@ -173,7 +173,7 @@ self: Analyzer =>
object HasMethodMatching {
def apply(name: Name, argtpes: List[Type], restpe: Type): Type = {
def templateArgType(argtpe: Type) =
- new BoundedWildcardType(mkTypeBounds(argtpe, AnyClass.tpe))
+ new BoundedWildcardType(TypeBounds(argtpe, AnyClass.tpe))
val dummyMethod = new TermSymbol(NoSymbol, NoPosition, "typer$dummy")
val mtpe = MethodType(dummyMethod.newSyntheticValueParams(argtpes map templateArgType), restpe)
memberWildcardType(name, mtpe)
@@ -249,7 +249,7 @@ self: Analyzer =>
* by replacing variables by their upper bounds,
* - all remaining free type parameters in the type are replaced by WildcardType.
* The _complexity_ of a stripped core type corresponds roughly to the number of
- * nodes in its ast, except that singleton types are widened befoe taking the complexity.
+ * nodes in its ast, except that singleton types are widened before taking the complexity.
* Two types overlap if they have the same type symbol, or
* if one or both are intersection types with a pair of overlapping parent types.
*/
@@ -302,10 +302,25 @@ self: Analyzer =>
if (isView) {
val found = pt.typeArgs(0)
val req = pt.typeArgs(1)
- typeErrorMsg(found, req)+
- "\nNote that implicit conversions are not applicable because they are ambiguous:\n "+
- coreMsg+"are possible conversion functions from "+ found+" to "+req
- } else {
+
+ /** A nice spot to explain some common situations a little
+ * less confusingly.
+ */
+ def explanation = {
+ if ((found =:= AnyClass.tpe) && (AnyRefClass.tpe <:< req))
+ "Note: Any is not implicitly converted to AnyRef. You can safely\n" +
+ "pattern match x: AnyRef or cast x.asInstanceOf[AnyRef] to do so."
+ else if ((found <:< AnyValClass.tpe) && (AnyRefClass.tpe <:< req))
+ "Note: primitive types are not implicitly converted to AnyRef.\n" +
+ "You can safely force boxing by casting x.asInstanceOf[AnyRef]."
+ else
+ "Note that implicit conversions are not applicable because they are ambiguous:\n "+
+ coreMsg+"are possible conversion functions from "+ found+" to "+req
+ }
+
+ typeErrorMsg(found, req) + "\n" + explanation
+ }
+ else {
"ambiguous implicit values:\n "+coreMsg + "match expected type "+pt
})
}
@@ -371,7 +386,7 @@ self: Analyzer =>
* or method type whose result type has a method whose name and type
* correspond to the HasMethodMatching type,
* or otherwise if `tp' is compatible with `pt'.
- * This methid is performance critical: 5-8% of typechecking time.
+ * This method is performance critical: 5-8% of typechecking time.
*/
def matchesPt(tp: Type, pt: Type, undet: List[Symbol]) = {
val start = startTimer(matchesPtNanos)
@@ -387,9 +402,9 @@ self: Analyzer =>
result
}
- def matchesPtView(tp: Type, ptarg: Type, ptres: Type, undet: List[Symbol]): Boolean = tp match {
- case MethodType(params, restpe) =>
- if (tp.isInstanceOf[ImplicitMethodType]) matchesPtView(restpe, ptarg, ptres, undet)
+ def matchesPtView(tp: Type, ptarg: Type, ptres: Type, undet: List[Symbol]): Boolean = tp match {
+ case mt @ MethodType(params, restpe) =>
+ if (mt.isImplicit) matchesPtView(restpe, ptarg, ptres, undet)
else params.length == 1 && matchesArgRes(params.head.tpe, restpe, ptarg, ptres, undet)
case ExistentialType(tparams, qtpe) =>
matchesPtView(normalize(tp), ptarg, ptres, undet)
@@ -465,24 +480,17 @@ self: Analyzer =>
// filter out failures from type inference, don't want to remove them from undetParams!
// we must be conservative in leaving type params in undetparams
- val uninstantiated = new ListBuffer[Symbol]
- val detargs = adjustTypeArgs(undetParams, targs, WildcardType, uninstantiated) // prototype == WildcardType: want to remove all inferred Nothing's
- // even if Nothing was inferred correctly, it's okay to ignore it (if it was the only solution, we'll infer it again next time)
- val (okParams, okArgs) = (undetParams zip detargs) filter {case (p, a) => !uninstantiated.contains(p)} unzip
- // TODO: optimise above line(s?) once `zipped filter` works (oh, the irony! this line is needed to get Zipped to type check...)
-
+ val (okParams, okArgs, _) = adjustTypeArgs(undetParams, targs) // prototype == WildcardType: want to remove all inferred Nothing's
val subst = new TreeTypeSubstituter(okParams, okArgs)
subst traverse itree2
// #2421b: since type inference (which may have been performed during implicit search)
// does not check whether inferred arguments meet the bounds of the corresponding parameter (see note in solvedTypes),
// must check again here:
- itree2 match { // roughly equivalent to typed1(itree2, EXPRmode, wildPt),
- // since typed1 only forces checking of the outer tree and calls typed on the subtrees
- // (they have already been type checked, by the typed1(itree...) above, so the subtrees are skipped by typed)
- // inlining the essential bit here for clarity
- //TODO: verify that these subtrees don't need re-checking
+ // TODO: I would prefer to just call typed instead of duplicating the code here, but this is probably a hotspot (and you can't just call typed, need to force re-typecheck)
+ itree2 match {
case TypeApply(fun, args) => typedTypeApply(itree2, EXPRmode, fun, args)
+ case Apply(TypeApply(fun, args), _) => typedTypeApply(itree2, EXPRmode, fun, args) // t2421c
case _ =>
}
@@ -515,7 +523,7 @@ self: Analyzer =>
* - the symbol's definition comes before, and does not contain the closest enclosing definition,
* - the symbol's definition is a val, var, or def with an explicit result type
* The aim of this method is to prevent premature cyclic reference errors
- * by computing the types of only those implicitis for which one of these
+ * by computing the types of only those implicits for which one of these
* conditions is true.
*/
def isValid(sym: Symbol) = {
@@ -715,7 +723,7 @@ self: Analyzer =>
val buf = new ListBuffer[List[ImplicitInfo]]
for ((clazz, pre) <- partMap) {
if (pre != NoType) {
- val companion = clazz.linkedModuleOfClass
+ val companion = clazz.companionModule
companion.moduleClass match {
case mc: ModuleClassSymbol =>
buf += (mc.implicitMembers map (im =>
@@ -776,6 +784,7 @@ self: Analyzer =>
inferImplicit(tree, appliedType(manifestClass.typeConstructor, List(tp)), true, false, context).tree
def findSubManifest(tp: Type) = findManifest(tp, if (full) FullManifestClass else OptManifestClass)
+ def findElemManifest(tp: Type) = findManifest(tp, if (full) FullManifestClass else PartialManifestClass)
def mot(tp0: Type): Tree = {
val tp1 = tp0.normalize
@@ -789,16 +798,22 @@ self: Analyzer =>
findSingletonManifest(sym.name.toString)
} else if (sym == ObjectClass || sym == AnyRefClass) {
findSingletonManifest("Object")
+ } else if (sym == RepeatedParamClass || sym == ByNameParamClass) {
+ EmptyTree
} else if (sym == ArrayClass && args.length == 1) {
- manifestFactoryCall("arrayType", args.head, findSubManifest(args.head))
+ manifestFactoryCall("arrayType", args.head, findElemManifest(args.head))
} else if (sym.isClass) {
val suffix = gen.mkClassOf(tp1) :: (args map findSubManifest)
manifestFactoryCall(
"classType", tp,
(if ((pre eq NoPrefix) || pre.typeSymbol.isStaticOwner) suffix
else findSubManifest(pre) :: suffix): _*)
- } else if (sym.isAbstractType) {
- if (sym.isExistential)
+ } else {
+ EmptyTree
+/* the following is dropped because it is dangerous
+ *
+ if (sym.isAbstractType) {
+ if (sym.isExistentiallyBound)
EmptyTree // todo: change to existential parameter manifest
else if (sym.isTypeParameterOrSkolem)
EmptyTree // a manifest should have been found by normal searchImplicit
@@ -818,11 +833,13 @@ self: Analyzer =>
}
} else {
EmptyTree // a manifest should have been found by normal searchImplicit
+*/
}
case RefinedType(parents, decls) =>
// refinement is not generated yet
if (parents.length == 1) findManifest(parents.head)
- else manifestFactoryCall("intersectionType", tp, parents map (findSubManifest(_)): _*)
+ else if (full) manifestFactoryCall("intersectionType", tp, parents map (findSubManifest(_)): _*)
+ else mot(erasure.erasure.intersectionDominator(parents))
case ExistentialType(tparams, result) =>
existentialAbstraction(tparams, result) match {
case ExistentialType(_, _) => mot(result)
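
The manifest search sketched above is what backs the Manifest context bound; the arrayType factory in particular is fed the element manifest found by the new findElemManifest. A small usage sketch (illustrative, relying only on Predef.manifest and the context bound):

    object ManifestDemo {
      // The implicit Manifest lets us instantiate the right runtime array class.
      def newArray[T: Manifest](n: Int): Array[T] = new Array[T](n)

      def main(args: Array[String]): Unit = {
        println(manifest[Array[Int]])         // materialized via the arrayType factory
        println(newArray[String](3).length)   // 3
      }
    }
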
@@ -897,7 +914,7 @@ self: Analyzer =>
def allImplicits: List[SearchResult] = {
val invalidImplicits = new ListBuffer[Symbol]
def search(iss: List[List[ImplicitInfo]], isLocal: Boolean) =
- applicableInfos(iss, isLocal, invalidImplicits).valuesIterator.toList
+ applicableInfos(iss, isLocal, invalidImplicits).values.toList
search(context.implicitss, true) ::: search(implicitsOfExpectedType, false)
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 621b6dce11..c0b54cb4f0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -6,9 +6,9 @@
package scala.tools.nsc
package typechecker
-import scala.tools.nsc.util.{Position, NoPosition}
+
import scala.collection.mutable.ListBuffer
-import scala.util.control.ControlException
+import scala.util.control.ControlThrowable
import symtab.Flags._
/** This trait ...
@@ -68,21 +68,15 @@ trait Infer {
if (nformals == 1 && actuals.length != 1 && actuals.length <= definitions.MaxTupleArity && !phase.erasedTypes)
List(atPos(pos)(gen.mkTuple(actuals))) else actuals
- /** A fresh type varable with given type parameter as origin.
+ /** A fresh type variable with given type parameter as origin.
*
* @param tparam ...
* @return ...
*/
def freshVar(tparam: Symbol): TypeVar = TypeVar(tparam)
- //todo: remove comments around following privates; right now they cause an IllegalAccess
- // error when built with scalac
-
- /*private*/
- class NoInstance(msg: String) extends RuntimeException(msg) with ControlException
-
- /*private*/
- class DeferredNoInstance(getmsg: () => String) extends NoInstance("") {
+ private class NoInstance(msg: String) extends Throwable(msg) with ControlThrowable { }
+ private class DeferredNoInstance(getmsg: () => String) extends NoInstance("") {
override def getMessage(): String = getmsg()
}
@@ -168,15 +162,17 @@ trait Infer {
for (tvar <- tvars)
if (tvar.constr.inst == tvar)
if (tvar.origin.typeSymbol.info eq ErrorType) {
- // this can happen if during solving a cyclic type paramater
+ // this can happen if during solving a cyclic type parameter
// such as T <: T gets completed. See #360
tvar.constr.inst = ErrorType
} else assert(false, tvar.origin+" at "+tvar.origin.typeSymbol.owner)
tvars map instantiate
}
- def skipImplicit(tp: Type) =
- if (tp.isInstanceOf[ImplicitMethodType]) tp.resultType else tp
+ def skipImplicit(tp: Type) = tp match {
+ case mt: MethodType if mt.isImplicit => mt.resultType
+ case _ => tp
+ }
/** Automatically perform the following conversions on expression types:
* A method type becomes the corresponding function type.
@@ -185,8 +181,8 @@ trait Infer {
* This method seems to be performance critical.
*/
def normalize(tp: Type): Type = tp match {
- case MethodType(params, restpe) if (!restpe.isDependent) =>
- if (tp.isInstanceOf[ImplicitMethodType]) normalize(restpe)
+ case mt @ MethodType(params, restpe) if (!restpe.isDependent) =>
+ if (mt.isImplicit) normalize(restpe)
else functionType(params map (_.tpe), normalize(restpe))
case PolyType(List(), restpe) => // nullary method type
normalize(restpe)
@@ -392,17 +388,13 @@ trait Infer {
}
}
- def isPlausiblyPopulated(tp1: Type, tp2: Type): Boolean = true
-
def isPlausiblyCompatible(tp: Type, pt: Type): Boolean = tp match {
case PolyType(_, restpe) =>
isPlausiblyCompatible(restpe, pt)
- case mt: ImplicitMethodType =>
- isPlausiblyCompatible(mt.resultType, pt)
case ExistentialType(tparams, qtpe) =>
isPlausiblyCompatible(qtpe, pt)
- case MethodType(params, restpe) =>
- if (tp.isInstanceOf[ImplicitMethodType]) isPlausiblyCompatible(restpe, pt)
+ case mt @ MethodType(params, restpe) =>
+ if (mt.isImplicit) isPlausiblyCompatible(restpe, pt)
else pt match {
case TypeRef(pre, sym, args) =>
if (sym.isAliasType) {
@@ -438,12 +430,7 @@ trait Infer {
else tp2 match {
case TypeRef(_, sym2, _) =>
if (sym2.isAliasType) isPlausiblySubType(tp1, tp2.dealias)
- else if (!sym2.isClass) true
- else if (sym1 isSubClass sym2) true
- else
- isNumericValueClass(sym1) &&
- isNumericValueClass(sym2) &&
- (sym1 == sym2 || numericWidth(sym1) < numericWidth(sym2))
+ else !sym2.isClass || (sym1 isSubClass sym2) || isNumericSubClass(sym1, sym2)
case _ =>
true
}
@@ -457,8 +444,8 @@ trait Infer {
}
final def normSubType(tp: Type, pt: Type): Boolean = tp match {
- case MethodType(params, restpe) =>
- if (tp.isInstanceOf[ImplicitMethodType]) normSubType(restpe, pt)
+ case mt @ MethodType(params, restpe) =>
+ if (mt.isImplicit) normSubType(restpe, pt)
else pt match {
case TypeRef(pre, sym, args) =>
if (sym.isAliasType) {
@@ -517,8 +504,8 @@ trait Infer {
def isCoercible(tp: Type, pt: Type): Boolean = false
- def isCompatibleArgs(tps: List[Type], pts: List[Type]): Boolean =
- (tps, pts).zipped forall isCompatibleArg
+ def isCompatibleArgs(tps: List[Type], pts: List[Type]) =
+ (tps corresponds pts)(isCompatibleArg) // @PP: corresponds
/* -- Type instantiation------------------------------------------------ */
@@ -560,10 +547,10 @@ trait Infer {
if (checkCompat(restpe.instantiateTypeParams(tparams, tvars), pt)) {
try {
// If the restpe is an implicit method, and the expected type is fully defined
- // optimze type varianbles wrt to the implicit formals only; ignore the result type.
+ // optimize type variables wrt the implicit formals only; ignore the result type.
// See test pos/jesper.scala
val varianceType = restpe match {
- case mt: ImplicitMethodType if isFullyDefined(pt) =>
+ case mt: MethodType if mt.isImplicit && isFullyDefined(pt) =>
MethodType(mt.params, AnyClass.tpe)
case _ =>
restpe
@@ -637,26 +624,39 @@ trait Infer {
* Retracted parameters are collected in `uninstantiated`.
*
* Rewrite for repeated param types: Map T* entries to Seq[T].
+ * @return (okTparams, okArgs, leftUndet)
+ * * okTparams, okArgs: lists of tparam symbols and their inferred types
+ * * leftUndet a list of remaining uninstantiated type parameters after inference
+ * (type parameters mapped by the constraint solver to `scala.Nothing'
+ * and not covariant in <code>restpe</code> are taken to be
+ * uninstantiated. Maps all those type arguments to their
+ * corresponding type parameters).
*/
- def adjustTypeArgs(tparams: List[Symbol], targs: List[Type], restpe: Type, uninstantiated: ListBuffer[Symbol]): List[Type] = {
+ def adjustTypeArgs(tparams: List[Symbol], targs: List[Type], restpe: Type = WildcardType): (List[Symbol], List[Type], List[Symbol]) = {
@inline def notCovariantIn(tparam: Symbol, restpe: Type) =
(varianceInType(restpe)(tparam) & COVARIANT) == 0 // tparam occurred non-covariantly (in invariant or contravariant position)
- (tparams, targs).zipped map { (tparam, targ) =>
+ val leftUndet = new ListBuffer[Symbol]
+ val okParams = new ListBuffer[Symbol]
+ val okArgs = new ListBuffer[Type]
+
+ (tparams, targs).zipped foreach { (tparam, targ) =>
if (targ.typeSymbol == NothingClass &&
(isWildcard(restpe) || notCovariantIn(tparam, restpe))) {
- uninstantiated += tparam
- tparam.tpeHK //@M tparam.tpe was wrong: we only want the type constructor,
- // not the type constructor applied to dummy arguments
- // see ticket 474 for an example that crashes if we use .tpe instead of .tpeHK)
- } else if (targ.typeSymbol == RepeatedParamClass) {
- targ.baseType(SeqClass)
- } else if (targ.typeSymbol == JavaRepeatedParamClass) {
- targ.baseType(ArrayClass)
+ leftUndet += tparam
+ // don't add anything to okArgs, it'll be filtered out later anyway
+ // used `tparam.tpeHK` as dummy before
} else {
- targ.widen
+ okParams += tparam
+ okArgs += (
+ if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass)
+ else if (targ.typeSymbol == JavaRepeatedParamClass) targ.baseType(ArrayClass)
+ else targ.widen
+ )
}
}
+
+ (okParams.toList, okArgs.toList, leftUndet.toList)
}
/** Return inferred type arguments, given type parameters, formal parameters,
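
adjustTypeArgs above singles out Nothing answers from the solver: they are kept only when the type parameter is covariant in the result type, otherwise the parameter is returned in leftUndet with its argument discarded. The user-visible asymmetry, roughly (illustrative):

    object NothingInferenceDemo {
      def main(args: Array[String]): Unit = {
        // Covariant position: Nothing is a sound and useful answer.
        val xs = List()           // inferred as List[Nothing]
        val ys: List[Int] = xs    // fine, List[Nothing] <: List[Int]
        println(ys.isEmpty)       // true
        // For an invariant result such as Array[T], a Nothing answer would be
        // useless, which is why it is retracted instead of substituted.
      }
    }
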
@@ -672,17 +672,18 @@ trait Infer {
* @param restp the result type of the method
* @param argtpes the argument types of the application
* @param pt the expected return type of the application
- * @param uninstantiated a listbuffer receiving all uninstantiated type parameters
- * (type parameters mapped by the constraint solver to `scala.All'
+ * @return (okTparams, okArgs, leftUndet)
+ * * okTparams, okArgs: lists of tparam symbols and their inferred types
+ * * leftUndet a list of remaining uninstantiated type parameters after inference
+ * (type parameters mapped by the constraint solver to `scala.Nothing'
* and not covariant in <code>restpe</code> are taken to be
* uninstantiated. Maps all those type arguments to their
* corresponding type parameters).
- * @return ...
+
* @throws NoInstance
*/
def methTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type,
- argtpes: List[Type], pt: Type,
- uninstantiated: ListBuffer[Symbol]): List[Type] = {
+ argtpes: List[Type], pt: Type): (List[Symbol], List[Type], List[Symbol]) = {
val tvars = tparams map freshVar
if (inferInfo)
println("methTypeArgs tparams = "+tparams+
@@ -690,7 +691,6 @@ trait Infer {
", restpe = "+restpe+
", argtpes = "+argtpes+
", pt = "+pt+
- ", uninstantiated = "+uninstantiated+
", tvars = "+tvars+" "+(tvars map (_.constr)))
if (formals.length != argtpes.length) {
throw new NoInstance("parameter lists differ in length")
@@ -735,7 +735,7 @@ trait Infer {
val targs = solvedTypes(tvars, tparams, tparams map varianceInTypes(formals),
false, lubDepth(formals) max lubDepth(argtpes))
// val res =
- adjustTypeArgs(tparams, targs, restpe, uninstantiated)
+ adjustTypeArgs(tparams, targs, restpe)
// println("meth type args "+", tparams = "+tparams+", formals = "+formals+", restpe = "+restpe+", argtpes = "+argtpes+", underlying = "+(argtpes map (_.widen))+", pt = "+pt+", uninstantiated = "+uninstantiated.toList+", result = "+res) //DEBUG
// res
}
@@ -848,11 +848,10 @@ trait Infer {
isCompatibleArgs(argtpes, formals) && isWeaklyCompatible(restpe, pt)
} else {
try {
- val uninstantiated = new ListBuffer[Symbol]
- val targs = methTypeArgs(undetparams, formals, restpe, argtpes, pt, uninstantiated)
- // #2665: must use weak conformance, not regular one (follow the monorphic case above)
- (exprTypeArgs(uninstantiated.toList, restpe.instantiateTypeParams(undetparams, targs), pt, isWeaklyCompatible) ne null) &&
- isWithinBounds(NoPrefix, NoSymbol, undetparams, targs)
+ val (okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
+ // #2665: must use weak conformance, not regular one (follow the monomorphic case above)
+ (exprTypeArgs(leftUndet, restpe.instantiateTypeParams(okparams, okargs), pt, isWeaklyCompatible) ne null) &&
+ isWithinBounds(NoPrefix, NoSymbol, okparams, okargs)
} catch {
case ex: NoInstance => false
}
@@ -935,7 +934,7 @@ trait Infer {
case et: ExistentialType =>
isAsSpecific(ftpe1.skolemizeExistential, ftpe2)
//et.withTypeVars(isAsSpecific(_, ftpe2))
- case mt: ImplicitMethodType =>
+ case mt: MethodType if mt.isImplicit =>
isAsSpecific(ftpe1.resultType, ftpe2)
case MethodType(params @ (x :: xs), _) =>
var argtpes = params map (_.tpe)
@@ -943,7 +942,7 @@ trait Infer {
argtpes = argtpes map (argtpe =>
if (isRepeatedParamType(argtpe)) argtpe.typeArgs.head else argtpe)
isApplicable(List(), ftpe2, argtpes, WildcardType)
- case PolyType(tparams, mt: ImplicitMethodType) =>
+ case PolyType(tparams, mt: MethodType) if mt.isImplicit =>
isAsSpecific(PolyType(tparams, mt.resultType), ftpe2)
case PolyType(_, MethodType(params @ (x :: xs), _)) =>
isApplicable(List(), ftpe2, params map (_.tpe), WildcardType)
@@ -955,12 +954,10 @@ trait Infer {
alts forall (alt => isAsSpecific(ftpe1, pre.memberType(alt)))
case et: ExistentialType =>
et.withTypeVars(isAsSpecific(ftpe1, _))
- case mt: ImplicitMethodType =>
- isAsSpecific(ftpe1, mt.resultType)
- case PolyType(tparams, mt: ImplicitMethodType) =>
- isAsSpecific(ftpe1, PolyType(tparams, mt.resultType))
- case MethodType(_, _) | PolyType(_, MethodType(_, _)) =>
- true
+ case mt: MethodType =>
+ !mt.isImplicit || isAsSpecific(ftpe1, mt.resultType)
+ case PolyType(tparams, mt: MethodType) =>
+ !mt.isImplicit || isAsSpecific(ftpe1, PolyType(tparams, mt.resultType))
case _ =>
isAsSpecificValueType(ftpe1, ftpe2, List(), List())
}
@@ -992,7 +989,7 @@ trait Infer {
ftpe1.isError || {
val specificCount = (if (isAsSpecific(ftpe1, ftpe2)) 1 else 0) -
(if (isAsSpecific(ftpe2, ftpe1) &&
- // todo: move to isAsSepecific test
+ // todo: move to isAsSpecific test
// (!ftpe2.isInstanceOf[OverloadedType] || ftpe1.isInstanceOf[OverloadedType]) &&
(!phase.erasedTypes || covariantReturnOverride(ftpe1, ftpe2))) 1 else 0)
val subClassCount = (if (isInProperSubClassOrObject(sym1, sym2)) 1 else 0) -
@@ -1245,15 +1242,14 @@ trait Infer {
" tparams = "+tparams+"\n"+
" pt = "+pt)
val targs = exprTypeArgs(tparams, tree.tpe, pt)
- val uninstantiated = new ListBuffer[Symbol]
- val detargs = if (keepNothings || (targs eq null)) targs //@M: adjustTypeArgs fails if targs==null, neg/t0226
- else adjustTypeArgs(tparams, targs, WildcardType, uninstantiated)
- val undetparams = uninstantiated.toList
- val detparams = tparams filterNot (undetparams contains _)
- substExpr(tree, detparams, detargs, pt)
- if (inferInfo)
- println("inferred expr instance "+tree+", detargs = "+detargs+", undetparams = "+undetparams)
- undetparams
+ val (okParams, okArgs, leftUndet) = // TODO AM: is this pattern match too expensive? should we push it down into the else of the if below?
+ if (keepNothings || (targs eq null)) (tparams, targs, List()) //@M: adjustTypeArgs fails if targs==null, neg/t0226
+ else adjustTypeArgs(tparams, targs)
+
+ if (inferInfo) println("inferred expr instance for "+ tree +" --> (okParams, okArgs, leftUndet)= "+(okParams, okArgs, leftUndet))
+
+ substExpr(tree, okParams, okArgs, pt)
+ leftUndet
}
/** Substitute free type variables `undetparams' of polymorphic argument
@@ -1297,13 +1293,12 @@ trait Infer {
val formals = formalTypes(params0 map (_.tpe), args.length)
val argtpes = actualTypes(args map (_.tpe.deconst), formals.length)
val restpe = fn.tpe.resultType(argtpes)
- val uninstantiated = new ListBuffer[Symbol]
- val targs = methTypeArgs(undetparams, formals, restpe, argtpes, pt, uninstantiated)
- checkBounds(fn.pos, NoPrefix, NoSymbol, undetparams, targs, "inferred ")
- val treeSubst = new TreeTypeSubstituter(undetparams, targs)
+ val (okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
+ checkBounds(fn.pos, NoPrefix, NoSymbol, okparams, okargs, "inferred ")
+ val treeSubst = new TreeTypeSubstituter(okparams, okargs)
treeSubst.traverse(fn)
treeSubst.traverseTrees(args)
- uninstantiated.toList
+ leftUndet
} catch {
case ex: NoInstance =>
errorTree(fn,
@@ -1418,7 +1413,7 @@ trait Infer {
if (lo <:< hi) {
if (!((lo <:< tparam.info.bounds.lo) && (tparam.info.bounds.hi <:< hi))) {
context.nextEnclosing(_.tree.isInstanceOf[CaseDef]).pushTypeBounds(tparam)
- tparam setInfo mkTypeBounds(lo, hi)
+ tparam setInfo TypeBounds(lo, hi)
if (settings.debug.value) log("new bounds of " + tparam + " = " + tparam.info)
} else {
if (settings.debug.value) log("redundant: "+tparam+" "+tparam.info+"/"+lo+" "+hi)
@@ -1498,6 +1493,11 @@ trait Infer {
def inferTypedPattern(pos: Position, pattp: Type, pt0: Type): Type = {
val pt = widen(pt0)
+
+ /** If we can absolutely rule out a match we can fail fast. */
+ if (pt.isFinalType && !(pt matchesPattern pattp))
+ error(pos, "scrutinee is incompatible with pattern type"+foundReqMsg(pattp, pt))
+
checkCheckable(pos, pattp, "pattern ")
if (!(pattp <:< pt)) {
val tpparams = freeTypeParamsOfTerms.collect(pattp)
@@ -1511,24 +1511,11 @@ trait Infer {
if (settings.debug.value) log("free type params (2) = " + ptparams)
val ptvars = ptparams map freshVar
val pt1 = pt.instantiateTypeParams(ptparams, ptvars)
- if (!(isPopulated(tp, pt1) && isInstantiatable(tvars ::: ptvars))) {
- // In ticket #2486 we have this example of code which would fail
- // here without a change:
- //
- // class A[T]
- // class B extends A[Int]
- // class C[T] extends A[T] { def f(t: A[T]) = t match { case x: B => () } }
- //
- // This reports error: pattern type is incompatible with expected type;
- // found : B
- // required: A[T]
- //
- // I am not sure what is the ideal fix, but for the moment I am intercepting
- // it at the last minute and applying a looser check before failing.
- if (!isPlausiblyCompatible(pattp, pt)) {
- error(pos, "pattern type is incompatible with expected type"+foundReqMsg(pattp, pt))
- return pattp
- }
+ // See ticket #2486 for an example of code which would incorrectly
+ // fail without verifying that !(pattp matchesPattern pt)
+ if (!(isPopulated(tp, pt1) && isInstantiatable(tvars ::: ptvars)) && !(pattp matchesPattern pt)) {
+ error(pos, "pattern type is incompatible with expected type"+foundReqMsg(pattp, pt))
+ return pattp
}
ptvars foreach instantiateTypeVar
}
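
The fast-fail added to inferTypedPattern rejects typed patterns that cannot possibly match a final scrutinee type up front. Roughly the situation it targets (illustrative classes):

    object PatternDemo {
      final class Leaf
      class Branch

      def main(args: Array[String]): Unit = {
        val x: Leaf = new Leaf
        // Leaf is final and unrelated to Branch, so the typed pattern below can
        // never match; with this change the typer reports the incompatibility
        // up front instead of accepting a dead case:
        //
        //   x match { case b: Branch => () }   // rejected at compile time
        //
        println(x)
      }
    }
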
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index ae8fd3a956..9ac628034f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -8,7 +8,6 @@ package scala.tools.nsc
package typechecker
import scala.collection.mutable.HashMap
-import scala.tools.nsc.util.Position
import symtab.Flags
import symtab.Flags._
@@ -29,7 +28,7 @@ trait Namers { self: Analyzer =>
case TypeRef(pre, sym, args)
if (sym.isTypeSkolem && (tparams contains sym.deSkolemize)) =>
// println("DESKOLEMIZING "+sym+" in "+sym.owner)
- mapOver(rawTypeRef(NoPrefix, sym.deSkolemize, args))
+ mapOver(TypeRef(NoPrefix, sym.deSkolemize, args))
/*
case PolyType(tparams1, restpe) =>
new DeSkolemizeMap(tparams1 ::: tparams).mapOver(tp)
@@ -76,7 +75,7 @@ trait Namers { self: Analyzer =>
}
def inConstructorFlag: Long =
- if (context.owner.isConstructor && !context.inConstructorSuffix || context.owner.isEarly) INCONSTRUCTOR
+ if (context.owner.isConstructor && !context.inConstructorSuffix || context.owner.isEarlyInitialized) INCONSTRUCTOR
else 0l
def moduleClassFlags(moduleFlags: Long) =
@@ -91,10 +90,10 @@ trait Namers { self: Analyzer =>
if (sym.isModule && sym.moduleClass != NoSymbol)
updatePosFlags(sym.moduleClass, pos, moduleClassFlags(flags))
if (sym.owner.isPackageClass &&
- (sym.linkedSym.rawInfo.isInstanceOf[loaders.SymbolLoader] ||
- sym.linkedSym.rawInfo.isComplete && runId(sym.validTo) != currentRunId))
+ (sym.companionSymbol.rawInfo.isInstanceOf[loaders.SymbolLoader] ||
+ sym.companionSymbol.rawInfo.isComplete && runId(sym.validTo) != currentRunId))
// pre-set linked symbol to NoType, in case it is not loaded together with this symbol.
- sym.linkedSym.setInfo(NoType)
+ sym.companionSymbol.setInfo(NoType)
sym
}
@@ -127,6 +126,7 @@ trait Namers { self: Analyzer =>
unsafeTypeParams foreach(sym => paramContext.scope.enter(sym))
newNamer(paramContext)
}
+
def usePrimary = sym.isTerm && (
(sym hasFlag PARAMACCESSOR) ||
((sym hasFlag PARAM) && sym.owner.isPrimaryConstructor)
@@ -197,7 +197,7 @@ trait Namers { self: Analyzer =>
def enterClassSymbol(tree : ClassDef): Symbol = {
var c: Symbol = context.scope.lookup(tree.name)
- if (c.isType && c.owner.isPackageClass && context.scope == c.owner.info.decls && !currentRun.compiles(c)) {
+ if (c.isType && c.owner.isPackageClass && context.scope == c.owner.info.decls && currentRun.canRedefine(c)) {
updatePosFlags(c, tree.pos, tree.mods.flags)
setPrivateWithin(tree, c, tree.mods)
} else {
@@ -214,7 +214,7 @@ trait Namers { self: Analyzer =>
}
clazz.sourceFile = file
if (clazz.sourceFile ne null) {
- assert(!currentRun.compiles(clazz) || clazz.sourceFile == currentRun.symSource(c));
+ assert(currentRun.canRedefine(clazz) || clazz.sourceFile == currentRun.symSource(c));
currentRun.symSource(c) = clazz.sourceFile
}
}
@@ -229,9 +229,12 @@ trait Namers { self: Analyzer =>
var m: Symbol = context.scope.lookup(tree.name)
val moduleFlags = tree.mods.flags | MODULE | FINAL
if (m.isModule && !m.isPackage && inCurrentScope(m) &&
- (!currentRun.compiles(m) || (m hasFlag SYNTHETIC))) {
+ (currentRun.canRedefine(m) || (m hasFlag SYNTHETIC))) {
updatePosFlags(m, tree.pos, moduleFlags)
setPrivateWithin(tree, m, tree.mods)
+ if (m.moduleClass != NoSymbol)
+ setPrivateWithin(tree, m.moduleClass, tree.mods)
+
context.unit.synthetics -= m
} else {
m = context.owner.newModule(tree.pos, tree.name)
@@ -288,8 +291,11 @@ trait Namers { self: Analyzer =>
def ensureCompanionObject(tree: ClassDef, creator: => Tree): Symbol = {
val m: Symbol = context.scope.lookup(tree.name.toTermName).filter(! _.isSourceMethod)
if (m.isModule && inCurrentScope(m) && currentRun.compiles(m)) m
- else enterSyntheticSym(creator)
+ else
+ /*util.trace("enter synthetic companion object for "+currentRun.compiles(m)+":")*/(
+ enterSyntheticSym(creator))
}
+
private def enterSymFinishWith(tree: Tree, tparams: List[TypeDef]) {
val sym = tree.symbol
if (settings.debug.value) log("entered " + sym + " in " + context.owner + ", scope-id = " + context.scope.hashCode());
@@ -304,19 +310,21 @@ trait Namers { self: Analyzer =>
ltype = new PolyTypeCompleter(tparams, ltype, tree, sym, context) //@M
if (sym.isTerm) skolemize(tparams)
}
- def copyIsSynthetic() = sym.owner.info.member(nme.copy).hasFlag(SYNTHETIC)
- if (sym.name == nme.copy && sym.hasFlag(SYNTHETIC) ||
- sym.name.startsWith(nme.copy + "$default$") && copyIsSynthetic()){
- // the 'copy' method of case classes needs a special type completer to make bug0054.scala (and others)
- // work. the copy method has to take exactly the same parameter types as the primary constructor.
+
+ if (sym.name == nme.copy || sym.name.startsWith(nme.copy + "$default$")) {
+ // it could be a compiler-generated copy method or one of its default getters
setInfo(sym)(mkTypeCompleter(tree)(copySym => {
- val constrType = copySym.owner.primaryConstructor.tpe
- val subst = new SubstSymMap(copySym.owner.typeParams, tparams map (_.symbol))
- for ((params, cparams) <- tree.asInstanceOf[DefDef].vparamss.zip(constrType.paramss);
- (param, cparam) <- params.zip(cparams)) {
- // need to clone the type cparam.tpe??? problem is: we don't have the new owner yet (the new param symbol)
- param.tpt.setType(subst(cparam.tpe))
- () // @LUC TODO workaround for #1996
+ def copyIsSynthetic() = sym.owner.info.member(nme.copy).hasFlag(SYNTHETIC)
+ if (sym.hasFlag(SYNTHETIC) && (!sym.hasFlag(DEFAULTPARAM) || copyIsSynthetic())) {
+ // the 'copy' method of case classes needs a special type completer to make bug0054.scala (and others)
+ // work. the copy method has to take exactly the same parameter types as the primary constructor.
+ val constrType = copySym.owner.primaryConstructor.tpe
+ val subst = new SubstSymMap(copySym.owner.typeParams, tparams map (_.symbol))
+ for ((params, cparams) <- tree.asInstanceOf[DefDef].vparamss.zip(constrType.paramss);
+ (param, cparam) <- params.zip(cparams)) {
+ // need to clone the type cparam.tpe??? problem is: we don't have the new owner yet (the new param symbol)
+ param.tpt.setType(subst(cparam.tpe))
+ }
}
ltype.complete(sym)
}))
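
The restructured completer above still ties the synthetic copy method (and its default getters) to the primary constructor's parameter types. What that buys at the use site (illustrative):

    object CopyDemo {
      case class Point(x: Int, y: Int)

      def main(args: Array[String]): Unit = {
        val p = Point(1, 2)
        // copy takes exactly the constructor's parameters, each defaulting to
        // the current field value via the generated copy$default$n getters.
        println(p.copy(y = 5))   // Point(1,5)
      }
    }
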
@@ -685,7 +693,7 @@ trait Namers { self: Analyzer =>
sym => TypeRef(clazz.owner.thisType, sym, clazz.typeParams map (_.tpe))))
println("Parents of "+clazz+":"+parents)
- // check that virtual classses are only defined as members of templates
+ // check that virtual classes are only defined as members of templates
if (clazz.isVirtualClass && !clazz.owner.isClass)
context.error(
clazz.pos,
@@ -718,14 +726,14 @@ trait Namers { self: Analyzer =>
// add the copy method to case classes; this needs to be done here, not in SyntheticMethods, because
// the namer phase must traverse this copy method to create default getters for its parameters.
- Namers.this.caseClassOfModuleClass get clazz.linkedModuleOfClass.moduleClass match {
+ Namers.this.caseClassOfModuleClass get clazz.companionModule.moduleClass match {
case Some(cdef) =>
def hasCopy(decls: Scope) = {
decls.iterator exists (_.name == nme.copy)
}
if (!hasCopy(decls) &&
!parents.exists(p => hasCopy(p.typeSymbol.info.decls)) &&
- !parents.flatMap(_.baseClasses).removeDuplicates.exists(bc => hasCopy(bc.info.decls)))
+ !parents.flatMap(_.baseClasses).distinct.exists(bc => hasCopy(bc.info.decls)))
addCopyMethod(cdef, templateNamer)
case None =>
}
@@ -835,9 +843,7 @@ trait Namers { self: Analyzer =>
val params = vparams map (vparam =>
if (meth hasFlag JAVA) vparam.setInfo(objToAny(vparam.tpe)) else vparam)
val restpe1 = convertToDeBruijn(vparams, 1)(restpe) // new dependent types: replace symbols in restpe with the ones in vparams
- if (!vparams.isEmpty && vparams.head.hasFlag(IMPLICIT))
- ImplicitMethodType(params, restpe1)
- else if (meth hasFlag JAVA) JavaMethodType(params, restpe1)
+ if (meth hasFlag JAVA) JavaMethodType(params, restpe1)
else MethodType(params, restpe1)
}
@@ -852,7 +858,7 @@ trait Namers { self: Analyzer =>
val site = meth.owner.thisType
def overriddenSymbol = intersectionType(meth.owner.info.parents).member(meth.name).filter(sym => {
- // luc: added .syubstSym from skolemized to deSkolemized
+ // luc: added .substSym from skolemized to deSkolemized
// site.memberType(sym): PolyType(tparams, MethodType(..., ...)) ==> all references to tparams are deSkolemized
// thisMethodType: tparams in PolyType are deSkolemized, the references in the MethodTypes are skolemized. ==> the two didn't match
// for instance, B.foo would not override A.foo, and the default on parameter b would not be inherited
@@ -944,7 +950,7 @@ trait Namers { self: Analyzer =>
// match empty and missing parameter list
if (vparamss.isEmpty && baseParamss == List(Nil)) baseParamss = Nil
if (vparamss == List(Nil) && baseParamss.isEmpty) baseParamss = List(Nil)
- assert(!overrides || vparamss.length == baseParamss.length, ""+ meth.fullNameString + ", "+ overridden.fullNameString)
+ assert(!overrides || vparamss.length == baseParamss.length, ""+ meth.fullName + ", "+ overridden.fullName)
var ownerNamer: Option[Namer] = None
var moduleNamer: Option[(ClassDef, Namer)] = None
@@ -955,7 +961,7 @@ trait Namers { self: Analyzer =>
// denotes the parameter lists which are on the left side of the current one. these get added
// to the default getter. Example: "def foo(a: Int)(b: Int = a)" gives "foo$default$1(a: Int) = a"
(List[List[ValDef]]() /: (vparamss))((previous: List[List[ValDef]], vparams: List[ValDef]) => {
- assert(!overrides || vparams.length == baseParamss.head.length, ""+ meth.fullNameString + ", "+ overridden.fullNameString)
+ assert(!overrides || vparams.length == baseParamss.head.length, ""+ meth.fullName + ", "+ overridden.fullName)
var baseParams = if (overrides) baseParamss.head else Nil
for (vparam <- vparams) {
val sym = vparam.symbol
@@ -977,7 +983,7 @@ trait Namers { self: Analyzer =>
val parentNamer = if (isConstr) {
val (cdef, nmr) = moduleNamer.getOrElse {
- val module = meth.owner.linkedModuleOfClass
+ val module = meth.owner.companionModule
module.initialize // call type completer (typedTemplate), adds the
// module's templateNamer to classAndNamerOfModule
val (cdef, nmr) = classAndNamerOfModule(module)
@@ -1030,12 +1036,10 @@ trait Namers { self: Analyzer =>
}
meth.owner.resetFlag(INTERFACE) // there's a concrete member now
val default = parentNamer.enterSyntheticSym(defaultTree)
- sym.defaultGetter = default
} else if (baseHasDefault) {
// the parameter does not have a default itself, but the corresponding parameter
// in the base class does.
sym.setFlag(DEFAULTPARAM)
- sym.defaultGetter = baseParams.head.defaultGetter
}
posCounter += 1
if (overrides) baseParams = baseParams.tail
@@ -1103,30 +1107,42 @@ trait Namers { self: Analyzer =>
caseClassCopyMeth(cdef) foreach (namer.enterSyntheticSym(_))
}
+
def typeSig(tree: Tree): Type = {
- val sym: Symbol = tree.symbol
- // For definitions, transform Annotation trees to AnnotationInfos, assign
- // them to the sym's annotations. Type annotations: see Typer.typedAnnotated
-
- // We have to parse definition annotatinos here (not in the typer when traversing
- // the MemberDef tree): the typer looks at annotations of certain symbols; if
- // they were added only in typer, depending on the compilation order, they would
- // be visible or not
- val annotated = if (sym.isModule) sym.moduleClass else sym
- // typeSig might be called multiple times, e.g. on a ValDef: val, getter, setter
- // parse the annotations only once.
- if (!annotated.isInitialized) tree match {
- case defn: MemberDef =>
- val ainfos = defn.mods.annotations filter { _ != null } map { ann =>
- // need to be lazy, #1782
- LazyAnnotationInfo(() => typer.typedAnnotation(ann))
- }
- if (!ainfos.isEmpty)
- annotated.setAnnotations(ainfos)
- if (annotated.isTypeSkolem)
- annotated.deSkolemize.setAnnotations(ainfos)
- case _ =>
+
+ /** For definitions, transform Annotation trees to AnnotationInfos, assign
+ * them to the sym's annotations. Type annotations: see Typer.typedAnnotated
+ * We have to parse definition annotations here (not in the typer when traversing
+ * the MemberDef tree): the typer looks at annotations of certain symbols; if
+ * they were added only in typer, depending on the compilation order, they would
+ * be visible or not
+ */
+ def annotate(annotated: Symbol) = {
+ // typeSig might be called multiple times, e.g. on a ValDef: val, getter, setter
+ // parse the annotations only once.
+ if (!annotated.isInitialized) tree match {
+ case defn: MemberDef =>
+ val ainfos = defn.mods.annotations filter { _ != null } map { ann =>
+ // need to be lazy, #1782
+ LazyAnnotationInfo(() => typer.typedAnnotation(ann))
+ }
+ if (!ainfos.isEmpty)
+ annotated.setAnnotations(ainfos)
+ if (annotated.isTypeSkolem)
+ annotated.deSkolemize.setAnnotations(ainfos)
+ case _ =>
+ }
}
+
+ val sym: Symbol = tree.symbol
+
+ // @Lukas: I am not sure this is the right way to do things.
+ // We used to only decorate the module class with annotations, which is
+ // clearly wrong. Now we decorate both the class and the object.
+ // But maybe some annotations are only meant for one of these but not for the other?
+ annotate(sym)
+ if (sym.isModule) annotate(sym.moduleClass)
+
val result =
try {
tree match {
@@ -1143,7 +1159,7 @@ trait Namers { self: Analyzer =>
newNamer(context.makeNewScope(tree, sym)).methodSig(mods, tparams, vparamss, tpt, rhs)
case vdef @ ValDef(mods, name, tpt, rhs) =>
- val typer1 = typer.constrTyperIf(sym.hasFlag(PARAM | PRESUPER) && sym.owner.isConstructor)
+ val typer1 = typer.constrTyperIf(sym.hasFlag(PARAM | PRESUPER) && !mods.hasFlag(JAVA) && sym.owner.isConstructor)
if (tpt.isEmpty) {
if (rhs.isEmpty) {
context.error(tpt.pos, "missing parameter type");
@@ -1185,23 +1201,38 @@ trait Namers { self: Analyzer =>
}
true
}
+
+ def isValidSelector(from: Name)(fun : => Unit) {
+ if (base.nonLocalMember(from) == NoSymbol &&
+ base.nonLocalMember(from.toTypeName) == NoSymbol) fun
+ }
+
def checkSelectors(selectors: List[ImportSelector]): Unit = selectors match {
case ImportSelector(from, _, to, _) :: rest =>
if (from != nme.WILDCARD && base != ErrorType) {
- if (base.nonLocalMember(from) == NoSymbol &&
- base.nonLocalMember(from.toTypeName) == NoSymbol)
- context.error(tree.pos, from.decode + " is not a member of " + expr)
+ isValidSelector(from) {
+ if (currentRun.compileSourceFor(expr, from))
+ return typeSig(tree)
+ // for Java code importing Scala objects
+ if (from.endsWith(nme.DOLLARraw))
+ isValidSelector(from.subName(0, from.length -1)) {
+ context.error(tree.pos, from.decode + " is not a member of " + expr)
+ }
+ else
+ context.error(tree.pos, from.decode + " is not a member of " + expr)
+ }
if (checkNotRedundant(tree.pos, from, to))
checkNotRedundant(tree.pos, from.toTypeName, to.toTypeName)
}
if (from != nme.WILDCARD && (rest.exists (sel => sel.name == from)))
- context.error(tree.pos, from.decode + " is renamed twice");
+ context.error(tree.pos, from.decode + " is renamed twice")
if ((to ne null) && to != nme.WILDCARD && (rest exists (sel => sel.rename == to)))
- context.error(tree.pos, to.decode + " appears twice as a target of a renaming");
+ context.error(tree.pos, to.decode + " appears twice as a target of a renaming")
checkSelectors(rest)
case Nil =>
}
+
checkSelectors(selectors)
ImportType(expr1)
}
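The selector checks above correspond to the following source-level rules (hypothetical objects p.A and p.B; each rejected import is commented with the error reported here):

    object p { object A; object B }

    object ImportDemo {
      // import p.{C => D}          // error: C is not a member of p
      // import p.{A => X, A => Y}  // error: A is renamed twice
      // import p.{A => X, B => X}  // error: X appears twice as a target of a renaming
      import p.{A => PA, B => PB}   // accepted: distinct names, distinct rename targets
      def show() { println(PA); println(PB) }
    }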
@@ -1214,11 +1245,11 @@ trait Namers { self: Analyzer =>
result match {
case PolyType(tparams, restpe)
if (!tparams.isEmpty && tparams.head.owner.isTerm ||
- // Adriaan: The added conditon below is quite a hack. It seems that HK type parameters is relying
+ // Adriaan: The added condition below is quite a hack. It seems that HK type parameters are relying
// on a pass that forces all infos in the type to get everything right.
// The problem is that the same pass causes cyclic reference errors in
// test pos/cyclics.scala. It turned out that deSkolemize is run way more often than necessary,
- // ruinning it only when needed fixes the cuclic reference errors.
+ // running it only when needed fixes the cyclic reference errors.
// But correcting deSkolemize broke HK types, because we don't do the traversal anymore.
// For the moment I made a special hack to do the traversal if we have HK type parameters.
// Maybe it's not a hack, then we need to document it better. But ideally, we should find
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 3009873735..b5c5bc95ae 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -7,7 +7,6 @@
package scala.tools.nsc
package typechecker
-import scala.tools.nsc.util.Position
import symtab.Flags._
import scala.collection.mutable.ListBuffer
@@ -101,7 +100,7 @@ trait NamesDefaults { self: Analyzer =>
import context.unit
/**
- * Transform a function into a block, and assing context.namedApplyBlockInfo to
+ * Transform a function into a block, and pass context.namedApplyBlockInfo to
* the new block as side-effect.
*
* `baseFun' is typed, the resulting block must be typed as well.
@@ -180,7 +179,7 @@ trait NamesDefaults { self: Analyzer =>
}
def moduleQual(pos: Position, tree: Symbol => Tree) = {
- val module = baseFun.symbol.owner.linkedModuleOfClass
+ val module = baseFun.symbol.owner.companionModule
if (module == NoSymbol) None
else Some(atPos(pos.focus)(tree(module)))
}
@@ -341,14 +340,15 @@ trait NamesDefaults { self: Analyzer =>
* the argument list (y = "lt") is transformed to (y = "lt", x = foo$default$1())
*/
def addDefaults(givenArgs: List[Tree], qual: Option[Tree], targs: List[Tree],
- previousArgss: List[List[Tree]], params: List[Symbol], pos: util.Position): (List[Tree], List[Symbol]) = {
+ previousArgss: List[List[Tree]], params: List[Symbol],
+ pos: util.Position, context: Context): (List[Tree], List[Symbol]) = {
if (givenArgs.length < params.length) {
val (missing, positional) = missingParams(givenArgs, params)
if (missing forall (_.hasFlag(DEFAULTPARAM))) {
val defaultArgs = missing map (p => {
var default1 = qual match {
- case Some(q) => gen.mkAttributedSelect(q.duplicate, p.defaultGetter)
- case None => gen.mkAttributedRef(p.defaultGetter)
+ case Some(q) => gen.mkAttributedSelect(q.duplicate, defaultGetter(p, context))
+ case None => gen.mkAttributedRef(defaultGetter(p, context))
}
default1 = if (targs.isEmpty) default1
else TypeApply(default1, targs.map(_.duplicate))
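A sketch of the user-level behaviour this implements, assuming a hypothetical method f; the $default$ getters mentioned in the comment above are compiler-generated and not callable from user code:

    object NamedDefaultsDemo {
      def f(x: Int = 1, y: String = "gt") = x + y.length

      def main(args: Array[String]) {
        // conceptually completed to f(x = f$default$1(), y = "lt"),
        // where f$default$1() yields the declared default 1
        println(f(y = "lt")) // 3
      }
    }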
@@ -365,6 +365,38 @@ trait NamesDefaults { self: Analyzer =>
}
/**
+ * For a parameter with default argument, find the method symbol of
+ * the default getter.
+ */
+ def defaultGetter(param: Symbol, context: Context) = {
+ val i = param.owner.paramss.flatten.findIndexOf(p => p.name == param.name) + 1
+ if (i > 0) {
+ if (param.owner.isConstructor) {
+ val defGetterName = "init$default$"+ i
+ param.owner.owner.companionModule.info.member(defGetterName)
+ } else {
+ val defGetterName = param.owner.name +"$default$"+ i
+ if (param.owner.owner.isClass) {
+ param.owner.owner.info.member(defGetterName)
+ } else {
+ // the owner of the method is another method. find the default
+ // getter in the context.
+ var res: Symbol = NoSymbol
+ var ctx = context
+ while(res == NoSymbol && ctx.outer != ctx) {
+ val s = ctx.scope.lookup(defGetterName)
+ if (s != NoSymbol && s.owner == param.owner.owner)
+ res = s
+ else
+ ctx = ctx.outer
+ }
+ res
+ }
+ }
+ } else NoSymbol
+ }
+
+ /**
* Removes name assignments from args. Additionally, returns an array mapping
* argument indices from call-site-order to definition-site-order.
*
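The lookup above encodes where default getters live and how they are named: method defaults become owner-class members called name$default$i, constructor defaults become init$default$i on the companion object, with i counting parameters across all lists starting at 1. A sketch under those assumptions, with a hypothetical class C:

    class C(val label: String = "unnamed") {       // constructor default:
      // getter C.init$default$1 lives on the companion object of C
      def pad(s: String, width: Int = 8): String = // method default, 2nd parameter:
        s + " " * (width - s.length max 0)         // getter pad$default$2 lives in class C
    }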
@@ -392,7 +424,7 @@ trait NamesDefaults { self: Analyzer =>
} else if (argPos contains pos) {
errorTree(arg, "parameter specified twice: "+ name)
} else {
- // for named arguments, check wether the assignment expression would
+ // for named arguments, check whether the assignment expression would
// typecheck. if it does, report an ambiguous error.
val param = params(pos)
val paramtpe = params(pos).tpe.cloneInfo(param)
@@ -408,6 +440,8 @@ trait NamesDefaults { self: Analyzer =>
case _ => super.apply(tp)
}
}
+ val reportAmbiguousErrors = typer.context.reportAmbiguousErrors
+ typer.context.reportAmbiguousErrors = false
val res = typer.silent(_.typed(arg, subst(paramtpe))) match {
case _: TypeError =>
// if the named argument is on the original parameter
@@ -425,6 +459,7 @@ trait NamesDefaults { self: Analyzer =>
errorTree(arg, "reference to "+ name +" is ambiguous; it is both, a parameter\n"+
"name of the method and the name of a variable currently in scope.")
}
+ typer.context.reportAmbiguousErrors = reportAmbiguousErrors
//@M note that we don't get here when an ambiguity was detected (during the computation of res),
// as errorTree throws an exception
typer.context.undetparams = udp
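The silent re-typing above is what detects the classic clash between a named argument and an assignment to a variable of the same name. A sketch with a hypothetical method twice; the quoted message is the one constructed above:

    object AmbiguityDemo {
      def twice(x: Int) = 2 * x

      def main(args: Array[String]) {
        var x = 10
        // twice(x = 3)  // error: reference to x is ambiguous; it is both, a parameter
        //               // name of the method and the name of a variable currently in scope.
        println(twice(x))        // unambiguous: plain positional argument
        x = 3; println(twice(x)) // unambiguous: assign first, then call
      }
    }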
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 7b41c7b249..003a173892 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -10,7 +10,6 @@ package typechecker
import symtab.Flags._
import collection.mutable.{HashSet, HashMap}
import transform.InfoTransform
-import scala.tools.nsc.util.{Position, NoPosition}
import scala.collection.mutable.ListBuffer
/** <p>
@@ -191,7 +190,7 @@ abstract class RefChecks extends InfoTransform {
case List(MixinOverrideError(_, msg)) =>
unit.error(clazz.pos, msg)
case MixinOverrideError(member, msg) :: others =>
- val others1 = others.map(_.member.name.decode).filter(member.name.decode != _).removeDuplicates
+ val others1 = others.map(_.member.name.decode).filter(member.name.decode != _).distinct
unit.error(
clazz.pos,
msg+(if (others1.isEmpty) ""
@@ -385,7 +384,7 @@ abstract class RefChecks extends InfoTransform {
val opc = new overridingPairs.Cursor(clazz)
while (opc.hasNext) {
- //Console.println(opc.overriding/* + ":" + opc.overriding.tpe*/ + " in "+opc.overriding.fullNameString + " overrides " + opc.overridden/* + ":" + opc.overridden.tpe*/ + " in "+opc.overridden.fullNameString + "/"+ opc.overridden.hasFlag(DEFERRED));//debug
+ //Console.println(opc.overriding/* + ":" + opc.overriding.tpe*/ + " in "+opc.overriding.fullName + " overrides " + opc.overridden/* + ":" + opc.overridden.tpe*/ + " in "+opc.overridden.fullName + "/"+ opc.overridden.hasFlag(DEFERRED));//debug
if (!opc.overridden.isClass) checkOverride(clazz, opc.overriding, opc.overridden);
opc.next
@@ -493,7 +492,7 @@ abstract class RefChecks extends InfoTransform {
/** validate all base types of a class in reverse linear order. */
def register(tp: Type) {
-// if (clazz.fullNameString.endsWith("Collection.Projection"))
+// if (clazz.fullName.endsWith("Collection.Projection"))
// println("validate base type "+tp)
val baseClass = tp.typeSymbol
if (baseClass.isClass) {
@@ -852,7 +851,7 @@ abstract class RefChecks extends InfoTransform {
val ownerTransformer = new ChangeOwnerTraverser(vsym, lazyDefSym)
val lazyDef = atPos(tree.pos)(
DefDef(lazyDefSym, ownerTransformer(
- if (tree.symbol.owner.isTrait // for traits, this is further tranformed in mixins
+ if (tree.symbol.owner.isTrait // for traits, this is further transformed in mixins
|| hasUnitType) rhs
else Block(List(
Assign(gen.mkAttributedRef(vsym), rhs)),
@@ -903,7 +902,7 @@ abstract class RefChecks extends InfoTransform {
val clazz = pat.tpe.typeSymbol;
clazz == seltpe.typeSymbol &&
clazz.isClass && (clazz hasFlag CASE) &&
- ((args, clazz.primaryConstructor.tpe.asSeenFrom(seltpe, clazz).paramTypes).zipped forall isIrrefutable)
+ (args corresponds clazz.primaryConstructor.tpe.asSeenFrom(seltpe, clazz).paramTypes)(isIrrefutable) // @PP: corresponds
case Typed(pat, tpt) =>
seltpe <:< tpt.tpe
case Ident(nme.WILDCARD) =>
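The rewrite to corresponds above relies on (xs corresponds ys)(p) being true exactly when both sequences have the same length and p holds pairwise, for example:

    object CorrespondsDemo {
      def main(args: Array[String]) {
        println((List(1, 2, 3) corresponds List("a", "bb", "ccc"))(_ == _.length)) // true
        println((List(1, 2) corresponds List("a"))(_ == _.length))                 // false: lengths differ
      }
    }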
@@ -923,11 +922,18 @@ abstract class RefChecks extends InfoTransform {
if (sym.isDeprecated && !currentOwner.ownerChain.exists(_.isDeprecated)) {
val dmsg = sym.deprecationMessage
val msg = sym.toString + sym.locationString +" is deprecated"+
- (if (dmsg.isDefined) ": "+ dmsg.get
- else "")
+ (if (dmsg.isDefined) ": "+ dmsg.get else "")
unit.deprecationWarning(pos, msg)
}
}
+ /** Similar to deprecation: check if the symbol is marked with @migration
+ * indicating it has changed semantics between versions.
+ */
+ private def checkMigration(sym: Symbol, pos: Position) = {
+ for (msg <- sym.migrationMessage)
+ unit.warning(pos, "%s%s has changed semantics:\n%s".format(sym, sym.locationString, msg))
+ }
+
/** Check that a deprecated val or def does not override a
* concrete, non-deprecated method. If it does, then
* deprecation is meaningless.
@@ -1015,7 +1021,7 @@ abstract class RefChecks extends InfoTransform {
}
}
val newResult = localTyper.typedPos(tree.pos) {
- Apply(Apply(Select(gen.mkAttributedRef(ArrayModule), nme.ofDim), args), List(manif))
+ new ApplyToImplicitArgs(Apply(Select(gen.mkAttributedRef(ArrayModule), nme.ofDim), args), List(manif))
}
currentApplication = tree
newResult
@@ -1028,7 +1034,16 @@ abstract class RefChecks extends InfoTransform {
private def transformSelect(tree: Select): Tree = {
val Select(qual, name) = tree
val sym = tree.symbol
+
+ /** Note: if a symbol has both @deprecated and @migration annotations and both
+ * warnings are enabled, only the first one checked here will be emitted.
+ * I assume that's a consequence of some code trying to avoid noise by suppressing
+ * warnings after the first, but I think it'd be better if we didn't have to
+ * arbitrarily choose one as more important than the other.
+ */
checkDeprecated(sym, tree.pos)
+ if (settings.Xmigration28.value)
+ checkMigration(sym, tree.pos)
if (currentClass != sym.owner && (sym hasFlag LOCAL)) {
var o = currentClass
@@ -1064,106 +1079,109 @@ abstract class RefChecks extends InfoTransform {
}
}
- override def transform(tree: Tree): Tree = try {
+ override def transform(tree: Tree): Tree = {
val savedLocalTyper = localTyper
val savedCurrentApplication = currentApplication
- val sym = tree.symbol
+ try {
+ val sym = tree.symbol
- // Apply RefChecks to annotations. Makes sure the annotations conform to
- // type bounds (bug #935), issues deprecation warnings for symbols used
- // inside annotations.
- applyRefchecksToAnnotations(tree)
+ // Apply RefChecks to annotations. Makes sure the annotations conform to
+ // type bounds (bug #935), issues deprecation warnings for symbols used
+ // inside annotations.
+ applyRefchecksToAnnotations(tree)
+
+ var result: Tree = tree match {
+ case DefDef(mods, name, tparams, vparams, tpt, EmptyTree) if tree.symbol.hasAnnotation(NativeAttr) =>
+ tree.symbol.resetFlag(DEFERRED)
+ transform(treeCopy.DefDef(tree, mods, name, tparams, vparams, tpt,
+ typed(Apply(gen.mkAttributedRef(Predef_error), List(Literal("native method stub"))))))
+
+ case ValDef(_, _, _, _) | DefDef(_, _, _, _, _, _) =>
+ checkDeprecatedOvers(tree)
+ tree
+
+ case Template(parents, self, body) =>
+ localTyper = localTyper.atOwner(tree, currentOwner)
+ validateBaseTypes(currentOwner)
+ checkDefaultsInOverloaded(currentOwner)
+ val bridges = addVarargBridges(currentOwner)
+ checkAllOverrides(currentOwner)
+
+ if (bridges.nonEmpty) treeCopy.Template(tree, parents, self, body ::: bridges)
+ else tree
+
+ case TypeTree() =>
+ val existentialParams = new ListBuffer[Symbol]
+ doTypeTraversal(tree) { // check all bounds, except those that are
+ // existential type parameters
+ case ExistentialType(tparams, tpe) =>
+ existentialParams ++= tparams
+ case t: TypeRef =>
+ val exparams = existentialParams.toList
+ val wildcards = exparams map (_ => WildcardType)
+ checkTypeRef(t.subst(exparams, wildcards), tree.pos)
+ case _ =>
+ }
+ tree
- var result: Tree = tree match {
- case DefDef(mods, name, tparams, vparams, tpt, EmptyTree) if tree.symbol.hasAnnotation(NativeAttr) =>
- tree.symbol.resetFlag(DEFERRED)
- transform(treeCopy.DefDef(tree, mods, name, tparams, vparams, tpt,
- typed(Apply(gen.mkAttributedRef(Predef_error), List(Literal("native method stub"))))))
+ case TypeApply(fn, args) =>
+ checkBounds(NoPrefix, NoSymbol, fn.tpe.typeParams, args map (_.tpe), tree.pos)
+ transformCaseApply(tree, ())
- case ValDef(_, _, _, _) | DefDef(_, _, _, _, _, _) =>
- checkDeprecatedOvers(tree)
- tree
+ case x @ Apply(_, _) =>
+ transformApply(x)
- case Template(parents, self, body) =>
- localTyper = localTyper.atOwner(tree, currentOwner)
- validateBaseTypes(currentOwner)
- checkDefaultsInOverloaded(currentOwner)
- val bridges = addVarargBridges(currentOwner)
- checkAllOverrides(currentOwner)
-
- if (bridges.nonEmpty) treeCopy.Template(tree, parents, self, body ::: bridges)
- else tree
-
- case TypeTree() =>
- val existentialParams = new ListBuffer[Symbol]
- doTypeTraversal(tree) { // check all bounds, except those that are
- // existential type parameters
- case ExistentialType(tparams, tpe) =>
- existentialParams ++= tparams
- case t: TypeRef =>
- val exparams = existentialParams.toList
- val wildcards = exparams map (_ => WildcardType)
- checkTypeRef(t.subst(exparams, wildcards), tree.pos)
- case _ =>
- }
- tree
+ case x @ If(_, _, _) =>
+ transformIf(x)
- case TypeApply(fn, args) =>
- checkBounds(NoPrefix, NoSymbol, fn.tpe.typeParams, args map (_.tpe), tree.pos)
- transformCaseApply(tree, ())
+ case New(tpt) =>
+ enterReference(tree.pos, tpt.tpe.typeSymbol)
+ tree
- case x @ Apply(_, _) =>
- transformApply(x)
+ case Typed(expr, tpt @ Ident(name)) if name == nme.WILDCARD_STAR.toTypeName && !isRepeatedParamArg(tree) =>
+ unit.error(tree.pos, "no `: _*' annotation allowed here\n"+
+ "(such annotations are only allowed in arguments to *-parameters)")
+ tree
- case x @ If(_, _, _) =>
- transformIf(x)
+ case Ident(name) =>
+ transformCaseApply(tree,
+ if (name != nme.WILDCARD && name != nme.WILDCARD_STAR.toTypeName) {
+ assert(sym != NoSymbol, tree) //debug
+ enterReference(tree.pos, sym)
+ }
+ )
- case New(tpt) =>
- enterReference(tree.pos, tpt.tpe.typeSymbol)
- tree
+ case x @ Select(_, _) =>
+ transformSelect(x)
- case Typed(expr, tpt @ Ident(name)) if name == nme.WILDCARD_STAR.toTypeName && !isRepeatedParamArg(tree) =>
- unit.error(tree.pos, "no `: _*' annotation allowed here\n"+
- "(such annotations are only allowed in arguments to *-parameters)")
+ case _ => tree
+ }
+ result = result match {
+ case CaseDef(pat, guard, body) =>
+ inPattern = true
+ val pat1 = transform(pat)
+ inPattern = false
+ treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
+ case _ =>
+ super.transform(result)
+ }
+ result match {
+ case ClassDef(_, _, _, _)
+ | TypeDef(_, _, _, _) =>
+ if (result.symbol.isLocal || result.symbol.owner.isPackageClass)
+ varianceValidator.traverse(result)
+ case _ =>
+ }
+ result
+ } catch {
+ case ex: TypeError =>
+ if (settings.debug.value) ex.printStackTrace();
+ unit.error(tree.pos, ex.getMessage())
tree
-
- case Ident(name) =>
- transformCaseApply(tree,
- if (name != nme.WILDCARD && name != nme.WILDCARD_STAR.toTypeName) {
- assert(sym != NoSymbol, tree) //debug
- enterReference(tree.pos, sym)
- }
- )
-
- case x @ Select(_, _) =>
- transformSelect(x)
-
- case _ => tree
- }
- result = result match {
- case CaseDef(pat, guard, body) =>
- inPattern = true
- val pat1 = transform(pat)
- inPattern = false
- treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
- case _ =>
- super.transform(result)
+ } finally {
+ localTyper = savedLocalTyper
+ currentApplication = savedCurrentApplication
}
- result match {
- case ClassDef(_, _, _, _)
- | TypeDef(_, _, _, _) =>
- if (result.symbol.isLocal || result.symbol.owner.isPackageClass)
- varianceValidator.traverse(result)
- case _ =>
- }
- localTyper = savedLocalTyper
- currentApplication = savedCurrentApplication
- result
- } catch {
- case ex: TypeError =>
- if (settings.debug.value) ex.printStackTrace();
- unit.error(tree.pos, ex.getMessage())
- tree
}
}
}
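Among the checks carried over into the reindented transform above is the rule that a `: _*' ascription is only legal as an argument to a *-parameter. A sketch with a hypothetical sum method:

    object VarargsDemo {
      def sum(xs: Int*) = (0 /: xs)(_ + _)

      def main(args: Array[String]) {
        val nums = List(1, 2, 3)
        println(sum(nums: _*))   // allowed: argument to the *-parameter xs
        // val bad = (nums: _*)  // error: no `: _*' annotation allowed here
        //                       // (such annotations are only allowed in arguments to *-parameters)
      }
    }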
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 257ab243b4..95c79eb97e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -9,7 +9,6 @@ package typechecker
import scala.collection.mutable.ListBuffer
import symtab.Flags._
-import util.Position
/** This phase adds super accessors for all super calls that
* either appear in a trait or have as a target a member of some outer class.
@@ -27,6 +26,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
// inherits abstract value `global' and class `Phase' from Transform
import global._
+ import definitions.{ IntClass, UnitClass, ByNameParamClass, Any_asInstanceOf, Object_## }
/** the following two members override abstract members in Transform */
val phaseName: String = "superaccessors"
@@ -45,7 +45,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
private def transformArgs(args: List[Tree], params: List[Symbol]) =
((args, params).zipped map { (arg, param) =>
- if (param.tpe.typeSymbol == definitions.ByNameParamClass)
+ if (param.tpe.typeSymbol == ByNameParamClass)
withInvalidOwner { checkPackedConforms(transform(arg), param.tpe.typeArgs.head) }
else transform(arg)
}) :::
@@ -71,15 +71,21 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
other = linked.info.decl(sym.name.toTermName).filter(_.isModule)
if (other != NoSymbol)
unit.error(sym.pos, "name clash: "+sym.owner+" defines "+sym+
- "\nand its companion "+sym.owner.linkedModuleOfClass+" also defines "+
+ "\nand its companion "+sym.owner.companionModule+" also defines "+
other)
}
}
- private def transformSuperSelect(tree: Tree) = tree match {
- case Select(sup @ Super(_, mix), name) =>
+ private def transformSuperSelect(tree: Tree): Tree = tree match {
+ // Intercept super.## and translate it to this.##
+ // which is fine since it's final.
+ case Select(sup @ Super(_, _), nme.HASHHASH) =>
+ Select(gen.mkAttributedThis(sup.symbol), Object_##) setType IntClass.tpe
+
+ case Select(sup @ Super(_, mix), name) =>
val sym = tree.symbol
val clazz = sup.symbol
+
if (sym.isDeferred) {
val member = sym.overridingSymbol(clazz);
if (mix != nme.EMPTY.toTypeName || member == NoSymbol ||
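With the new case above, a source-level super.## is compiled as this.##, which is safe because ## is final and cannot be overridden. A minimal illustration with a hypothetical class:

    class Hashed(val id: Int) {
      def viaSuper = super.##  // rewritten by the case above to the same call as below
      def viaThis  = this.##
    }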
@@ -339,7 +345,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
if (isDependentType) {
val preciseTpe = expectedTpe.asSeenFrom(singleType(NoPrefix, obj), ownerClass) //typeRef(singleType(NoPrefix, obj), v.tpe.symbol, List())
- TypeApply(Select(res, definitions.Any_asInstanceOf),
+ TypeApply(Select(res, Any_asInstanceOf),
List(TypeTree(preciseTpe)))
} else res
}
@@ -365,7 +371,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
if (protAcc == NoSymbol) {
protAcc = clazz.newMethod(field.pos, nme.protSetterName(field.originalName))
protAcc.setInfo(MethodType(protAcc.newSyntheticValueParams(List(clazz.typeOfThis, field.tpe)),
- definitions.UnitClass.tpe))
+ UnitClass.tpe))
clazz.info.decls.enter(protAcc)
val code = DefDef(protAcc, {
val obj :: value :: Nil = protAcc.paramss.head;
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index 9ae56f05a3..1e8b89cb6f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -88,14 +88,16 @@ trait SyntheticMethods extends ast.TreeDSL {
typer typed { DEF(method) === LIT(nargs) }
}
- def productElementMethod(accs: List[Symbol]): Tree = {
- val symToTpe = makeTypeConstructor(List(IntClass.tpe), AnyClass.tpe)
- val method = syntheticMethod(nme.productElement, 0, symToTpe)
+ /** Common code for productElement and productElementName
+ */
+ def perElementMethod(accs: List[Symbol], methodName: Name, resType: Type, caseFn: Symbol => Tree): Tree = {
+ val symToTpe = makeTypeConstructor(List(IntClass.tpe), resType)
+ val method = syntheticMethod(methodName, 0, symToTpe)
val arg = method ARG 0
- val default = List( DEFAULT ==> THROW(IndexOutOfBoundsExceptionClass, arg) )
+ val default = List(DEFAULT ==> THROW(IndexOutOfBoundsExceptionClass, arg))
val cases =
for ((sym, i) <- accs.zipWithIndex) yield
- CASE(LIT(i)) ==> Ident(sym)
+ CASE(LIT(i)) ==> caseFn(sym)
typer typed {
DEF(method) === {
@@ -103,6 +105,11 @@ trait SyntheticMethods extends ast.TreeDSL {
}
}
}
+ def productElementMethod(accs: List[Symbol]): Tree =
+ perElementMethod(accs, nme.productElement, AnyClass.tpe, x => Ident(x))
+
+ def productElementNameMethod(accs: List[Symbol]): Tree =
+ perElementMethod(accs, nme.productElementName, StringClass.tpe, x => Literal(x.name.toString))
def moduleToStringMethod: Tree = {
val method = syntheticMethod(nme.toString_, FINAL, makeNoArgConstructor(StringClass.tpe))
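For a hypothetical case class Person(name: String, age: Int), the per-element method built above corresponds to a match over the accessor index; the name variant (wired up but disabled further down) would return the parameter names instead:

    case class Person(name: String, age: Int)

    object ProductDemo {
      // hand-written analogues of what perElementMethod generates
      def productElementByHand(p: Person, n: Int): Any = n match {
        case 0 => p.name
        case 1 => p.age
        case _ => throw new IndexOutOfBoundsException(n.toString)
      }
      def productElementNameByHand(n: Int): String = n match {
        case 0 => "name"
        case 1 => "age"
        case _ => throw new IndexOutOfBoundsException(n.toString)
      }

      def main(args: Array[String]) {
        println(Person("Ada", 36).productElement(1)) // 36
        println(productElementNameByHand(0))         // name
      }
    }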
@@ -173,15 +180,14 @@ trait SyntheticMethods extends ast.TreeDSL {
// returns (Apply, Bind)
def makeTrees(acc: Symbol, cpt: Type): (Tree, Bind) = {
- val varName = context.unit.fresh.newName(clazz.pos.focus, acc.name + "$")
- val (eqMethod, binding) =
- if (isRepeatedParamType(cpt))
- (TypeApply(varName DOT nme.sameElements, List(TypeTree(cpt.baseType(SeqClass).typeArgs.head))),
- Star(WILD()))
- else
- ((varName DOT nme.EQ): Tree,
- WILD())
- (eqMethod APPLY Ident(acc), varName BIND binding)
+ val varName = context.unit.fresh.newName(clazz.pos.focus, acc.name + "$")
+ val isRepeated = isRepeatedParamType(cpt)
+ val binding = if (isRepeated) Star(WILD()) else WILD()
+ val eqMethod: Tree =
+ if (isRepeated) gen.mkRuntimeCall(nme.sameElements, List(Ident(varName), Ident(acc)))
+ else (varName DOT nme.EQ)(Ident(acc))
+
+ (eqMethod, varName BIND binding)
}
// Creates list of parameters and a guard for each
@@ -265,6 +271,9 @@ trait SyntheticMethods extends ast.TreeDSL {
Product_productPrefix -> (() => productPrefixMethod),
Product_productArity -> (() => productArityMethod(accessors.length)),
Product_productElement -> (() => productElementMethod(accessors)),
+ // This is disabled pending a reimplementation which doesn't add any
+ // weight to case classes (i.e. inspects the bytecode.)
+ // Product_productElementName -> (() => productElementNameMethod(accessors)),
Product_canEqual -> (() => canEqualMethod)
)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index cc8d39fc5f..c2b6e7adf2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -8,7 +8,6 @@ package scala.tools.nsc
package typechecker
import scala.tools.nsc.symtab.Flags._
-import scala.tools.nsc.util.{Position, NoPosition}
abstract class TreeCheckers extends Analyzer {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 1d6f1bddac..284b12e501 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -12,9 +12,9 @@ package scala.tools.nsc
package typechecker
import scala.collection.mutable.{HashMap, ListBuffer}
-import scala.util.control.ControlException
+import scala.util.control.ControlThrowable
import scala.tools.nsc.interactive.RangePositions
-import scala.tools.nsc.util.{ Position, Set, NoPosition, SourceFile, BatchSourceFile }
+import scala.tools.nsc.util.{Set, SourceFile, BatchSourceFile}
import symtab.Flags._
import util.Statistics
@@ -135,21 +135,31 @@ trait Typers { self: Analyzer =>
*/
val HKmode = 0x4000 // @M: could also use POLYmode | TAPPmode
+ /** The mode <code>BYVALmode</code> is set when we are typing an expression
+ * that occurs in a by-value position. An expression e1 is in by-value
+ * position within expression e2 iff it will be reduced to a value at that
+ * position during the evaluation of e2. Examples are by-value function
+ * arguments or the conditional of an if-then-else clause.
+ * This mode has been added to support continuations.
+ */
+ val BYVALmode = 0x8000
+
/** The mode <code>TYPEPATmode</code> is set when we are typing a type in a pattern
*/
val TYPEPATmode = 0x10000
private val stickyModes: Int = EXPRmode | PATTERNmode | TYPEmode | ALTmode
- private def funMode(mode: Int) = mode & (stickyModes | SCCmode) | FUNmode | POLYmode
+ private def funMode(mode: Int) = mode & (stickyModes | SCCmode) | FUNmode | POLYmode | BYVALmode
private def typeMode(mode: Int) =
if ((mode & (PATTERNmode | TYPEPATmode)) != 0) TYPEmode | TYPEPATmode
else TYPEmode
- private def argMode(fun: Tree, mode: Int) =
+ private def argMode(fun: Tree, mode: Int) = {
if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode
else mode
+ }
abstract class Typer(context0: Context) {
import context0.unit
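The by-value/by-name distinction that BYVALmode tracks is the usual source-level one; a sketch with hypothetical methods (comments mark which argument positions are by-value):

    object ByValueDemo {
      def strict(x: Int) = x + 1  // x is a by-value parameter
      def lazily(x: => Int) { }   // x is a by-name parameter

      def compute(): Int = { println("evaluated"); 42 }

      def main(args: Array[String]) {
        strict(compute())                   // by-value position: prints "evaluated"
        lazily(compute())                   // by-name position: prints nothing
        val r = if (compute() > 0) 1 else 2 // the condition is a by-value position as well
        println(r)
      }
    }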
@@ -198,7 +208,7 @@ trait Typers { self: Analyzer =>
s traverse fun
for (arg <- args) s traverse arg
}
- Apply(fun, args) setPos fun.pos
+ new ApplyToImplicitArgs(fun, args) setPos fun.pos
case ErrorType =>
fun
}
@@ -334,7 +344,7 @@ trait Typers { self: Analyzer =>
tp match {
case TypeRef(pre, sym, args) =>
(checkNotLocked(sym)) && (
- !sym.isTypeMember ||
+ !sym.isNonClassType ||
checkNonCyclic(pos, appliedType(pre.memberInfo(sym), args), sym) // @M! info for a type ref to a type parameter now returns a polytype
// @M was: checkNonCyclic(pos, pre.memberInfo(sym).subst(sym.typeParams, args), sym)
)
@@ -439,13 +449,13 @@ trait Typers { self: Analyzer =>
check(owner, scope, pt, tree setType tp1.typeSymbol.classBound)
else if (owner == NoSymbol)
tree setType packSymbols(hiddenSymbols.reverse, tp1)
- else { // privates
+ else if (!phase.erasedTypes) { // privates
val badSymbol = hiddenSymbols.head
error(tree.pos,
(if (badSymbol hasFlag PRIVATE) "private " else "") + badSymbol +
" escapes its defining scope as part of type "+tree.tpe)
setError(tree)
- }
+ } else tree
}
def addHidden(sym: Symbol) =
@@ -553,7 +563,7 @@ trait Typers { self: Analyzer =>
sym.rawInfo.isInstanceOf[loaders.ClassfileLoader] && {
sym.rawInfo.load(sym)
(sym.sourceFile ne null) &&
- (currentRun.compiledFiles contains sym.sourceFile)
+ (currentRun.compiledFiles contains sym.sourceFile.path)
}
}
@@ -632,16 +642,15 @@ trait Typers { self: Analyzer =>
* </ol>
*/
private def stabilize(tree: Tree, pre: Type, mode: Int, pt: Type): Tree = {
- def isNotAValue(sym: Symbol) = // bug #1392
- !sym.isValue || (sym.isModule && isValueClass(sym.linkedClassOfModule))
-
if (tree.symbol.hasFlag(OVERLOADED) && (mode & FUNmode) == 0)
inferExprAlternative(tree, pt)
val sym = tree.symbol
+
if (tree.tpe.isError) tree
else if ((mode & (PATTERNmode | FUNmode)) == PATTERNmode && tree.isTerm) { // (1)
- checkStable(tree)
- } else if ((mode & (EXPRmode | QUALmode)) == EXPRmode && isNotAValue(sym) && !phase.erasedTypes) { // (2)
+ if (sym.isValue) checkStable(tree)
+ else errorTree(tree, sym+" is not a value")
+ } else if ((mode & (EXPRmode | QUALmode)) == EXPRmode && !sym.isValue && !phase.erasedTypes) { // (2)
errorTree(tree, sym+" is not a value")
} else {
if (sym.isStable && pre.isStable && tree.tpe.typeSymbol != ByNameParamClass &&
@@ -768,7 +777,7 @@ trait Typers { self: Analyzer =>
* (5) Convert constructors in a pattern as follows:
* (5.1) If constructor refers to a case class factory, set tree's type to the unique
* instance of its primary constructor that is a subtype of the expected type.
- * (5.2) If constructor refers to an exractor, convert to application of
+ * (5.2) If constructor refers to an extractor, convert to application of
* unapply or unapplySeq method.
*
* (6) Convert all other types to TypeTree nodes.
@@ -781,7 +790,8 @@ trait Typers { self: Analyzer =>
* is an integer fitting in the range of that type, convert it to that type.
* (11) Widen numeric literals to their expected type, if necessary
* (12) When in mode EXPRmode, convert E to { E; () } if expected type is scala.Unit.
- * (13) When in mode EXPRmode, apply a view
+ * (13) When in mode EXPRmode, apply AnnotationChecker conversion if expected type is annotated.
+ * (14) When in mode EXPRmode, apply a view
* If all this fails, error
*/
protected def adapt(tree: Tree, mode: Int, pt: Type, original: Tree = EmptyTree): Tree = tree.tpe match {
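Step (14) is the ordinary implicit view: when the expression's type does not conform to the expected type, a single conversion is inferred and applied (now wrapped in an ApplyImplicitView node). A sketch with a hypothetical Celsius wrapper:

    object ViewDemo {
      case class Celsius(degrees: Double)
      implicit def doubleToCelsius(d: Double): Celsius = Celsius(d) // the view

      def main(args: Array[String]) {
        val c: Celsius = 21.5 // adapted to doubleToCelsius(21.5) by step (14)
        println(c)
      }
    }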
@@ -817,7 +827,7 @@ trait Typers { self: Analyzer =>
TypeTree(tparam.tpeHK) setPos tree.pos.focus)) setPos tree.pos //@M/tcpolyinfer: changed tparam.tpe to tparam.tpeHK
context.undetparams = context.undetparams ::: tparams1
adapt(tree1 setType restpe.substSym(tparams, tparams1), mode, pt, original)
- case mt: ImplicitMethodType if ((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) => // (4.1)
+ case mt: MethodType if mt.isImplicit && ((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) => // (4.1)
if (!context.undetparams.isEmpty/* && (mode & POLYmode) == 0 disabled to make implicits in new collection work; we should revisit this. */) { // (9)
// println("adapt IMT: "+(context.undetparams, pt)) //@MDEBUG
context.undetparams = inferExprInstance(
@@ -888,7 +898,7 @@ trait Typers { self: Analyzer =>
!(tree.symbol.hasFlag(JAVA) && context.unit.isJava)) { // (7)
// @M When not typing a higher-kinded type ((mode & HKmode) == 0)
// or raw type (tree.symbol.hasFlag(JAVA) && context.unit.isJava), types must be of kind *,
- // and thus parameterised types must be applied to their type arguments
+ // and thus parameterized types must be applied to their type arguments
// @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
errorTree(tree, tree.symbol+" takes type parameters")
tree setType tree.tpe
@@ -977,13 +987,15 @@ trait Typers { self: Analyzer =>
return typed(atPos(tree.pos)(Block(List(tree), Literal(()))), mode, pt)
else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt))
return typed(atPos(tree.pos)(Select(tree, "to"+sym.name)), mode, pt)
+ case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (13)
+ return typed(adaptAnnotations(tree, mode, pt), mode, pt)
case _ =>
}
if (!context.undetparams.isEmpty) {
return instantiate(tree, mode, pt)
}
if (context.implicitsEnabled && !tree.tpe.isError && !pt.isError) {
- // (13); the condition prevents chains of views
+ // (14); the condition prevents chains of views
if (settings.debug.value) log("inferring view from "+tree.tpe+" to "+pt)
val coercion = inferView(tree, tree.tpe, pt, true)
// convert forward views of delegate types into closures wrapped around
@@ -997,7 +1009,7 @@ trait Typers { self: Analyzer =>
if (coercion != EmptyTree) {
if (settings.debug.value) log("inferred view from "+tree.tpe+" to "+pt+" = "+coercion+":"+coercion.tpe)
return newTyper(context.makeImplicit(context.reportAmbiguousErrors)).typed(
- Apply(coercion, List(tree)) setPos tree.pos, mode, pt)
+ new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt)
}
}
}
@@ -1027,6 +1039,7 @@ trait Typers { self: Analyzer =>
((qual.symbol eq null) || !qual.symbol.isTerm || qual.symbol.isValue) &&
phase.id <= currentRun.typerPhase.id && !qtpe.isError &&
qtpe.typeSymbol != NullClass && qtpe.typeSymbol != NothingClass && qtpe != WildcardType &&
+ !qual.isInstanceOf[ApplyImplicitView] && // don't chain views
context.implicitsEnabled) { // don't try to adapt a top-level type that's the subject of an implicit search
// this happens because, if isView, typedImplicit tries to apply the "current" implicit value to
// a value that needs to be coerced, so we check whether the implicit value has an `apply` method
@@ -1038,7 +1051,7 @@ trait Typers { self: Analyzer =>
}
val coercion = inferView(qual, qtpe, searchTemplate, true)
if (coercion != EmptyTree)
- typedQualifier(atPos(qual.pos)(Apply(coercion, List(qual))))
+ typedQualifier(atPos(qual.pos)(new ApplyImplicitView(coercion, List(qual))))
else
qual
} else {
@@ -1255,9 +1268,9 @@ trait Typers { self: Analyzer =>
/*
if (settings.Xshowcls.value != "" &&
- settings.Xshowcls.value == context.owner.fullNameString)
+ settings.Xshowcls.value == context.owner.fullName)
println("INFO "+context.owner+
- ", baseclasses = "+(context.owner.info.baseClasses map (_.fullNameString))+
+ ", baseclasses = "+(context.owner.info.baseClasses map (_.fullName))+
", lin = "+(context.owner.info.baseClasses map (context.owner.thisType.baseType)))
*/
}
@@ -1314,7 +1327,7 @@ trait Typers { self: Analyzer =>
// attributes(mdef)
// initialize all constructors of the linked class: the type completer (Namer.methodSig)
// might add default getters to this object. example: "object T; class T(x: Int = 1)"
- val linkedClass = mdef.symbol.linkedClassOfModule
+ val linkedClass = mdef.symbol.companionClass
if (linkedClass != NoSymbol)
for (c <- linkedClass.info.decl(nme.CONSTRUCTOR).alternatives)
c.initialize
@@ -1476,6 +1489,8 @@ trait Typers { self: Analyzer =>
*/
def typedTemplate(templ: Template, parents1: List[Tree]): Template = {
val clazz = context.owner
+ // complete lazy annotations
+ val annots = clazz.annotations
if (templ.symbol == NoSymbol)
templ setSymbol clazz.newLocalDummy(templ.pos)
val self1 = templ.self match {
@@ -1490,7 +1505,7 @@ trait Typers { self: Analyzer =>
// val tpt1 = checkNoEscaping.privates(clazz.thisSym, typedType(tpt))
// treeCopy.ValDef(vd, mods, name, tpt1, EmptyTree) setType NoType
// but this leads to cycles for existential self types ==> #2545
- if (self1.name != nme.WILDCARD) context.scope enter self1.symbol
+ if (self1.name != nme.WILDCARD) context.scope enter self1.symbol
val selfType =
if (clazz.isAnonymousClass && !phase.erasedTypes)
intersectionType(clazz.info.parents, clazz.owner)
@@ -1530,6 +1545,9 @@ trait Typers { self: Analyzer =>
val typer1 = constrTyperIf(sym.hasFlag(PARAM) && sym.owner.isConstructor)
val typedMods = removeAnnotations(vdef.mods)
+ // complete lazy annotations
+ val annots = sym.annotations
+
var tpt1 = checkNoEscaping.privates(sym, typer1.typedType(vdef.tpt))
checkNonCyclic(vdef, tpt1)
if (sym.hasAnnotation(definitions.VolatileAttr)) {
@@ -1545,7 +1563,7 @@ trait Typers { self: Analyzer =>
vdef.rhs
} else {
val tpt2 = if (sym hasFlag DEFAULTPARAM) {
- // When typechecking default parameter, replace all type parameters in the expected type by Wildcarad.
+ // When typechecking default parameter, replace all type parameters in the expected type by Wildcard.
// This allows defining "def foo[T](a: T = 1)"
val tparams =
if (sym.owner.isConstructor) sym.owner.owner.info.typeParams
@@ -1558,11 +1576,11 @@ trait Typers { self: Analyzer =>
}
// allow defaults on by-name parameters
if (sym hasFlag BYNAMEPARAM)
- if (tpt1.tpe.typeArgs.isEmpty) WildcardType // during erasure tpt1 is Funciton0
+ if (tpt1.tpe.typeArgs.isEmpty) WildcardType // during erasure tpt1 is Function0
else subst(tpt1.tpe.typeArgs(0))
else subst(tpt1.tpe)
} else tpt1.tpe
- newTyper(typer1.context.make(vdef, sym)).transformedOrTyped(vdef.rhs, tpt2)
+ newTyper(typer1.context.make(vdef, sym)).transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2)
}
treeCopy.ValDef(vdef, typedMods, vdef.name, tpt1, checkDead(rhs1)) setType NoType
}
@@ -1800,6 +1818,9 @@ trait Typers { self: Analyzer =>
val tparams1 = ddef.tparams mapConserve typedTypeDef
val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef)
+ // complete lazy annotations
+ val annots = meth.annotations
+
for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1)
if (isRepeatedParamType(vparam1.symbol.tpe))
error(vparam1.pos, "*-parameter must come last")
@@ -1824,7 +1845,7 @@ trait Typers { self: Analyzer =>
error(ddef.pos, "constructor definition not allowed here")
typed(ddef.rhs)
} else {
- transformedOrTyped(ddef.rhs, tpt1.tpe)
+ transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe)
}
if (meth.isPrimaryConstructor && meth.isClassConstructor &&
@@ -1845,6 +1866,8 @@ trait Typers { self: Analyzer =>
reenterTypeParams(tdef.tparams) // @M!
val tparams1 = tdef.tparams mapConserve (typedTypeDef) // @M!
val typedMods = removeAnnotations(tdef.mods)
+ // complete lazy annotations
+ val annots = tdef.symbol.annotations
val rhs1 = checkNoEscaping.privates(tdef.symbol, typedType(tdef.rhs))
checkNonCyclic(tdef.symbol)
if (tdef.symbol.owner.isType)
@@ -1867,11 +1890,31 @@ trait Typers { self: Analyzer =>
}
}
+ private def isLoopHeaderLabel(name: Name): Boolean =
+ name.startsWith("while$") || name.startsWith("doWhile$")
+
def typedLabelDef(ldef: LabelDef): LabelDef = {
- val restpe = ldef.symbol.tpe.resultType
- val rhs1 = typed(ldef.rhs, restpe)
- ldef.params foreach (param => param.tpe = param.symbol.tpe)
- treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs1) setType restpe
+ if (!isLoopHeaderLabel(ldef.symbol.name) || phase.id > currentRun.typerPhase.id) {
+ val restpe = ldef.symbol.tpe.resultType
+ val rhs1 = typed(ldef.rhs, restpe)
+ ldef.params foreach (param => param.tpe = param.symbol.tpe)
+ treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs1) setType restpe
+ } else {
+ val initpe = ldef.symbol.tpe.resultType
+ val rhs1 = typed(ldef.rhs)
+ val restpe = rhs1.tpe
+ if (restpe == initpe) { // stable result, no need to check again
+ ldef.params foreach (param => param.tpe = param.symbol.tpe)
+ treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs1) setType restpe
+ } else {
+ context.scope.unlink(ldef.symbol)
+ val sym2 = namer.enterInScope(
+ context.owner.newLabel(ldef.pos, ldef.name) setInfo MethodType(List(), restpe))
+ val rhs2 = typed(resetAllAttrs(ldef.rhs), restpe)
+ ldef.params foreach (param => param.tpe = param.symbol.tpe)
+ treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs2) setSymbol sym2 setType restpe
+ }
+ }
}
protected def typedFunctionIDE(fun : Function, txt : Context) = {}
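The while$/doWhile$ labels special-cased above come from the compiler's desugaring of loops into label definitions; roughly (hypothetical countdown, label name illustrative):

    object LoopDemo {
      def countdown(start: Int) {
        var i = start
        while (i > 0) { println(i); i -= 1 }
        // roughly desugared into a label definition:
        //   def while$1(): Unit = if (i > 0) { { println(i); i -= 1 }; while$1() } else ()
        //   while$1()
      }
      def main(args: Array[String]) { countdown(3) }
    }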
@@ -1991,7 +2034,7 @@ trait Typers { self: Analyzer =>
||
pt.typeSymbol == PartialFunctionClass &&
fun.vparams.length == 1 && fun.body.isInstanceOf[Match])
- && // see bug901 for a reason why next conditions are neeed
+ && // see bug901 for a reason why next conditions are needed
(pt.normalize.typeArgs.length - 1 == fun.vparams.length
||
fun.vparams.exists(_.tpt.isEmpty)))
@@ -2095,7 +2138,7 @@ trait Typers { self: Analyzer =>
} else {
val localTyper = if (inBlock || (stat.isDef && !stat.isInstanceOf[LabelDef])) this
else newTyper(context.make(stat, exprOwner))
- val result = checkDead(localTyper.typed(stat))
+ val result = checkDead(localTyper.typed(stat, EXPRmode | BYVALmode, WildcardType))
if (treeInfo.isSelfOrSuperConstrCall(result)) {
context.inConstructorSuffix = true
if (treeInfo.isSelfConstrCall(result) && result.symbol.pos.pointOrElse(0) >= exprOwner.enclMethod.pos.pointOrElse(0))
@@ -2175,11 +2218,11 @@ trait Typers { self: Analyzer =>
args mapConserve (arg => typedArg(arg, mode, 0, WildcardType))
def typedArgs(args: List[Tree], mode: Int, originalFormals: List[Type], adaptedFormals: List[Type]) = {
- def newmode(i: Int) =
- if (isVarArgTpes(originalFormals) && i >= originalFormals.length - 1) STARmode else 0
-
- for (((arg, formal), i) <- (args zip adaptedFormals).zipWithIndex) yield
- typedArg(arg, mode, newmode(i), formal)
+ var newmodes = originalFormals map ((tp: Type) => if (tp.typeSymbol != ByNameParamClass) BYVALmode else 0)
+ if (isVarArgTpes(originalFormals)) // TR check really necessary?
+ newmodes = newmodes.take(newmodes.length-1) ::: List.fill(args.length - originalFormals.length + 1)(STARmode | BYVALmode)
+ for (((arg, formal), m) <- ((args zip adaptedFormals) zip newmodes)) yield
+ typedArg(arg, mode, m, formal)
}
/** Does function need to be instantiated, because a missing parameter
@@ -2257,12 +2300,12 @@ trait Typers { self: Analyzer =>
val args1 = args map {
case arg @ AssignOrNamedArg(Ident(name), rhs) =>
// named args: only type the righthand sides ("unknown identifier" errors otherwise)
- val rhs1 = typedArg(rhs, amode, 0, WildcardType)
+ val rhs1 = typedArg(rhs, amode, BYVALmode, WildcardType)
argtpes += NamedType(name, rhs1.tpe.deconst)
// the assign is untyped; that's ok because we call doTypedApply
atPos(arg.pos) { new AssignOrNamedArg(arg.lhs , rhs1) }
case arg =>
- val arg1 = typedArg(arg, amode, 0, WildcardType)
+ val arg1 = typedArg(arg, amode, BYVALmode, WildcardType)
argtpes += arg1.tpe.deconst
arg1
}
@@ -2346,7 +2389,7 @@ trait Typers { self: Analyzer =>
true
case _ => false
}
- val (allArgs, missing) = addDefaults(args, qual, targs, previousArgss, params, fun.pos.focus)
+ val (allArgs, missing) = addDefaults(args, qual, targs, previousArgss, params, fun.pos.focus, context)
if (allArgs.length == formals.length) {
// useful when a default doesn't match parameter type, e.g. def f[T](x:T="a"); f[Int]()
context.diagnostic = "Error occured in an application involving default arguments." :: context.diagnostic
@@ -2425,10 +2468,15 @@ trait Typers { self: Analyzer =>
val lenientTargs = protoTypeArgs(tparams, formals, mt.resultApprox, pt)
val strictTargs = (lenientTargs, tparams).zipped map ((targ, tparam) =>
if (targ == WildcardType) tparam.tpe else targ) //@M TODO: should probably be .tpeHK
- def typedArgToPoly(arg: Tree, formal: Type): Tree = {
+ var remainingParams = paramTypes
+ def typedArgToPoly(arg: Tree, formal: Type): Tree = { //TR TODO: cleanup
val lenientPt = formal.instantiateTypeParams(tparams, lenientTargs)
+ val newmode =
+ if (remainingParams.head.typeSymbol == ByNameParamClass) POLYmode
+ else POLYmode | BYVALmode
+ if (remainingParams.tail.nonEmpty) remainingParams = remainingParams.tail
// println("typedArgToPoly(arg, formal): "+(arg, formal))
- val arg1 = typedArg(arg, argMode(fun, mode), POLYmode, lenientPt)
+ val arg1 = typedArg(arg, argMode(fun, mode), newmode, lenientPt)
val argtparams = context.extractUndetparams()
// println("typedArgToPoly(arg1, argtparams): "+(arg1, argtparams))
if (!argtparams.isEmpty) {
@@ -2483,7 +2531,7 @@ trait Typers { self: Analyzer =>
error(fun.pos, "cannot resolve overloaded unapply")
(ErrorType, List())
}
- val (unappFormal, freeVars) = freshArgType(unappType)
+ val (unappFormal, freeVars) = freshArgType(unappType.skolemizeExistential(context.owner, tree))
val context1 = context.makeNewScope(context.tree, context.owner)
freeVars foreach context1.scope.enter
val typer1 = newTyper(context1)
@@ -2507,7 +2555,7 @@ trait Typers { self: Analyzer =>
//Console.println(" contains?"+sym.tpe.decls.lookup(fun.symbol.name));
if(sym != fun.symbol.owner && (sym.isPackageClass||sym.isModuleClass) /*(1)*/ ) { // (1) see 'files/pos/unapplyVal.scala'
if(fun.symbol.owner.isClass) {
- mkThisType(fun.symbol.owner)
+ ThisType(fun.symbol.owner)
} else {
//Console.println("2 ThisType("+fun.symbol.owner+")")
NoPrefix // see 'files/run/unapplyComplex.scala'
@@ -2570,13 +2618,18 @@ trait Typers { self: Analyzer =>
hasError = true
annotationError
}
- def needConst(tr: Tree): None.type = {
- error(tr.pos, "annotation argument needs to be a constant; found: "+tr)
- None
+
+ def tryConst(tr: Tree, pt: Type) = typed(tr, EXPRmode, pt) match {
+ // null cannot be used as constant value for classfile annotations
+ case l @ Literal(c) if !(l.isErroneous || c.value == null) =>
+ Some(LiteralAnnotArg(c))
+ case _ =>
+ error(tr.pos, "annotation argument needs to be a constant; found: "+tr)
+ None
}
/** Converts an untyped tree to a ClassfileAnnotArg. If the conversion fails,
- * an error message is reporded and None is returned.
+ * an error message is reported and None is returned.
*/
def tree2ConstArg(tree: Tree, pt: Type): Option[ClassfileAnnotArg] = tree match {
case ann @ Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
@@ -2587,27 +2640,30 @@ trait Typers { self: Analyzer =>
None
} else Some(NestedAnnotArg(annInfo))
- // use of: object Array.apply[A <: AnyRef](args: A*): Array[A] = ...
- // and object Array.apply(args: Int*): Array[Int] = ... (and similar)
- case Apply(fun, members) =>
+ // use of Array.apply[T: ClassManifest](xs: T*): Array[T]
+ // and Array.apply(x: Int, xs: Int*): Array[Int] (and similar)
+ case Apply(fun, args) =>
val typedFun = typed(fun, funMode(mode), WildcardType)
if (typedFun.symbol.owner == ArrayModule.moduleClass &&
- typedFun.symbol.name == nme.apply &&
- pt.typeSymbol == ArrayClass &&
- !pt.typeArgs.isEmpty)
- trees2ConstArg(members, pt.typeArgs.head)
+ typedFun.symbol.name == nme.apply)
+ pt match {
+ case TypeRef(_, sym, argts) if (sym == ArrayClass && !argts.isEmpty) =>
+ trees2ConstArg(args, argts.head)
+ case _ =>
+ // For classfile annotations, pt can only be T:
+ // BT = Int, .., String, Class[_], JavaAnnotClass
+ // T = BT | Array[BT]
+ // So an array literal as argument can only be valid if pt is Array[_]
+ error(tree.pos, "found array constant, expected argument of type "+ pt)
+ None
+ }
else
- needConst(tree)
+ tryConst(tree, pt)
case Typed(t, _) => tree2ConstArg(t, pt)
- case tree => typed(tree, EXPRmode, pt) match {
- // null cannot be used as constant value for classfile annotations
- case l @ Literal(c) if !(l.isErroneous || c.value == null) =>
- Some(LiteralAnnotArg(c))
- case _ =>
- needConst(tree)
- }
+ case tree =>
+ tryConst(tree, pt)
}
def trees2ConstArg(trees: List[Tree], pt: Type): Option[ArrayAnnotArg] = {
val args = trees.map(tree2ConstArg(_, pt))
@@ -2624,7 +2680,7 @@ trait Typers { self: Analyzer =>
case Select(New(tpt), nme.CONSTRUCTOR) =>
(fun, outerArgss)
case _ =>
- error(fun.pos, "unexpected tree in annotationn: "+ fun)
+ error(fun.pos, "unexpected tree in annotation: "+ fun)
(setError(fun), outerArgss)
}
extract(ann, List())
@@ -2661,7 +2717,7 @@ trait Typers { self: Analyzer =>
error(arg.pos, "unknown annotation argument name: " + name)
(nme.ERROR, None)
} else if (!names.contains(sym)) {
- error(arg.pos, "duplicate value for anontation argument " + name)
+ error(arg.pos, "duplicate value for annotation argument " + name)
(nme.ERROR, None)
} else {
names -= sym
@@ -2676,7 +2732,7 @@ trait Typers { self: Analyzer =>
for (name <- names) {
if (!name.annotations.contains(AnnotationInfo(AnnotationDefaultAttr.tpe, List(), List())) &&
!name.hasFlag(DEFAULTPARAM))
- error(ann.pos, "annotation " + annType.typeSymbol.fullNameString + " is missing argument " + name.name)
+ error(ann.pos, "annotation " + annType.typeSymbol.fullName + " is missing argument " + name.name)
}
if (hasError) annotationError
@@ -2959,7 +3015,7 @@ trait Typers { self: Analyzer =>
*/
protected def typed1(tree: Tree, mode: Int, pt: Type): Tree = {
//Console.println("typed1("+tree.getClass()+","+Integer.toHexString(mode)+","+pt+")")
- def ptOrLub(tps: List[Type]) = if (isFullyDefined(pt)) pt else weakLub(tps map (_.deconst))
+ def ptOrLub(tps: List[Type]) = if (isFullyDefined(pt)) (pt, false) else weakLub(tps map (_.deconst))
//@M! get the type of the qualifier in a Select tree, otherwise: NoType
def prefixType(fun: Tree): Type = fun match {
@@ -3024,7 +3080,7 @@ trait Typers { self: Analyzer =>
} else {
// An annotated term, created with annotation ascription
// term : @annot()
- def annotTypeTree(ainfo: AnnotationInfo): Tree =
+ def annotTypeTree(ainfo: AnnotationInfo): Tree = //TR: function not used ??
TypeTree(arg1.tpe.withAnnotation(ainfo)) setOriginal tree
if (ann.tpe == null) {
@@ -3046,7 +3102,7 @@ trait Typers { self: Analyzer =>
context.owner.newAliasType(tree.pos, name) setInfo pt
else
context.owner.newAbstractType(tree.pos, name) setInfo
- mkTypeBounds(NothingClass.tpe, AnyClass.tpe)
+ TypeBounds(NothingClass.tpe, AnyClass.tpe)
val rawInfo = vble.rawInfo
vble = if (vble.name == nme.WILDCARD.toTypeName) context.scope.enter(vble)
else namer.enterInScope(vble)
@@ -3071,7 +3127,7 @@ trait Typers { self: Analyzer =>
trackSetInfo(vble)(
if (treeInfo.isSequenceValued(body)) seqType(body1.tpe)
else body1.tpe)
- treeCopy.Bind(tree, name, body1) setSymbol vble setType body1.tpe // buraq, was: pt
+ treeCopy.Bind(tree, name, body1) setSymbol vble setType body1.tpe // burak, was: pt
}
}
@@ -3085,14 +3141,9 @@ trait Typers { self: Analyzer =>
}
def typedAssign(lhs: Tree, rhs: Tree): Tree = {
- def mayBeVarGetter(sym: Symbol) = sym.info match {
- case PolyType(List(), _) => sym.owner.isClass && !sym.isStable
- case _: ImplicitMethodType => sym.owner.isClass && !sym.isStable
- case _ => false
- }
val lhs1 = typed(lhs, EXPRmode | LHSmode, WildcardType)
val varsym = lhs1.symbol
- if ((varsym ne null) && mayBeVarGetter(varsym))
+ if ((varsym ne null) && treeInfo.mayBeVarGetter(varsym))
lhs1 match {
case Select(qual, name) =>
return typed(
@@ -3105,7 +3156,7 @@ trait Typers { self: Analyzer =>
}
if ((varsym ne null) && (varsym.isVariable || varsym.isValue && phase.erasedTypes)) {
- val rhs1 = typed(rhs, lhs1.tpe)
+ val rhs1 = typed(rhs, EXPRmode | BYVALmode, lhs1.tpe)
treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitClass.tpe
} else {
if (!lhs1.tpe.isError) {
@@ -3119,15 +3170,15 @@ trait Typers { self: Analyzer =>
}
def typedIf(cond: Tree, thenp: Tree, elsep: Tree) = {
- val cond1 = checkDead(typed(cond, BooleanClass.tpe))
- if (elsep.isEmpty) { // in the future, should be unecessary
+ val cond1 = checkDead(typed(cond, EXPRmode | BYVALmode, BooleanClass.tpe))
+ if (elsep.isEmpty) { // in the future, should be unnecessary
val thenp1 = typed(thenp, UnitClass.tpe)
treeCopy.If(tree, cond1, thenp1, elsep) setType thenp1.tpe
} else {
var thenp1 = typed(thenp, pt)
var elsep1 = typed(elsep, pt)
- val owntype = ptOrLub(List(thenp1.tpe, elsep1.tpe))
- if (isNumericValueType(owntype)) {
+ val (owntype, needAdapt) = ptOrLub(List(thenp1.tpe, elsep1.tpe))
+ if (needAdapt) { //isNumericValueType(owntype)) {
thenp1 = adapt(thenp1, mode, owntype)
elsep1 = adapt(elsep1, mode, owntype)
}
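ptOrLub now also reports whether the branches still need adapting to the computed weak lub; the user-visible effect is the usual numeric widening of if expressions (hypothetical values):

    object IfLubDemo {
      def main(args: Array[String]) {
        val flip = args.length > 0
        val x = if (flip) 1 else 2.5 // type is Double, the weak lub of Int and Double;
                                     // the Int branch is adapted (widened) to 1.0
        val d: Double = x            // compiles because x already has type Double
        println(d)
      }
    }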
@@ -3150,7 +3201,7 @@ trait Typers { self: Analyzer =>
" has return statement; needs result type")
} else {
context.enclMethod.returnsSeen = true
- val expr1: Tree = typed(expr, restpt0.tpe)
+ val expr1: Tree = typed(expr, EXPRmode | BYVALmode, restpt0.tpe)
treeCopy.Return(tree, checkDead(expr1)) setSymbol enclMethod.owner setType NothingClass.tpe
}
}
@@ -3273,7 +3324,7 @@ trait Typers { self: Analyzer =>
case _ => false
}
}
- if (errorInResult(fun) || (args exists errorInResult)) {
+ if (errorInResult(fun) || (args exists errorInResult) || errorInResult(tree)) {
if (printTypings) println("second try for: "+fun+" and "+args)
val Select(qual, name) = fun
val args1 = tryTypedArgs(args, argMode(fun, mode), ex)
@@ -3305,10 +3356,14 @@ trait Typers { self: Analyzer =>
case fun1: Tree =>
val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
incCounter(typedApplyCount)
+ def isImplicitMethod(tpe: Type) = tpe match {
+ case mt: MethodType => mt.isImplicit
+ case _ => false
+ }
val res =
if (phase.id <= currentRun.typerPhase.id &&
fun2.isInstanceOf[Select] &&
- !fun2.tpe.isInstanceOf[ImplicitMethodType] &&
+ !isImplicitMethod(fun2.tpe) &&
((fun2.symbol eq null) || !fun2.symbol.isConstructor) &&
(mode & (EXPRmode | SNDTRYmode)) == EXPRmode) {
tryTypedApply(fun2, args)
@@ -3325,7 +3380,7 @@ trait Typers { self: Analyzer =>
// this check is needed to avoid infinite recursion in Duplicators
// (calling typed1 more than once for the same tree)
if (checked ne res) typed { atPos(tree.pos)(checked) }
- else res
+ else res
} else res
/* Would like to do the following instead, but curiously this fails; todo: investigate
if (fun2.symbol.name == nme.apply && fun2.symbol.owner == ArrayClass)
@@ -3439,7 +3494,7 @@ trait Typers { self: Analyzer =>
} else {
findMixinSuper(clazz.info)
}
- tree setSymbol clazz setType mkSuperType(clazz.thisType, owntype)
+ tree setSymbol clazz setType SuperType(clazz.thisType, owntype)
}
}
@@ -3482,36 +3537,40 @@ trait Typers { self: Analyzer =>
val qual1 = adaptToName(qual, name)
if (qual1 ne qual) return typed(treeCopy.Select(tree, qual1, name), mode, pt)
}
+
if (!reallyExists(sym)) {
+ if (context.owner.toplevelClass.hasFlag(JAVA) && name.isTypeName) {
+ val tree1 = atPos(tree.pos) { gen.convertToSelectFromType(qual, name) }
+ if (tree1 != EmptyTree) return typed1(tree1, mode, pt)
+ }
+
if (settings.debug.value) Console.err.println("qual = "+qual+":"+qual.tpe+"\nSymbol="+qual.tpe.termSymbol+"\nsymbol-info = "+qual.tpe.termSymbol.info+"\nscope-id = "+qual.tpe.termSymbol.info.decls.hashCode()+"\nmembers = "+qual.tpe.members+"\nname = "+name+"\nfound = "+sym+"\nowner = "+context.enclClass.owner)
+
+ def makeErrorTree = {
+ val tree1 = tree match {
+ case Select(_, _) => treeCopy.Select(tree, qual, name)
+ case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
+ }
+ setError(tree1)
+ }
+
+ if (name == nme.ERROR && onlyPresentation)
+ return makeErrorTree
+
if (!qual.tpe.widen.isErroneous) {
error(tree.pos,
if (name == nme.CONSTRUCTOR)
qual.tpe.widen+" does not have a constructor"
else
decode(name)+" is not a member of "+
- (if (qual.tpe.typeSymbol.isTypeParameterOrSkolem) "type parameter " else "") +
- qual.tpe.widen +
+ (if (qual.tpe.typeSymbol.isTypeParameterOrSkolem) "type parameter " else "") +
+ qual.tpe.widen +
(if ((context.unit ne null) && // Martin: why is this condition needed?
qual.pos.isDefined && tree.pos.isDefined && qual.pos.line < tree.pos.line)
"\npossible cause: maybe a semicolon is missing before `"+decode(name)+"'?"
else ""))
}
-
- // Temporary workaround to retain type information for qual so that askTypeCompletion has something to
- // work with. This appears to work in the context of the IDE, but is incorrect and needs to be
- // revisited.
- if (onlyPresentation) {
- // Nb. this appears to throw away the effects of setError, but some appear to be
- // retained across the copy.
- setError(tree)
- val tree1 = tree match {
- case Select(_, _) => treeCopy.Select(tree, qual, name)
- case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
- }
- tree1
- } else
- setError(tree)
+ if (onlyPresentation) makeErrorTree else setError(tree)
} else {
val tree1 = tree match {
case Select(_, _) => treeCopy.Select(tree, qual, name)
@@ -3547,7 +3606,7 @@ trait Typers { self: Analyzer =>
var defSym: Symbol = tree.symbol // the directly found symbol
var pre: Type = NoPrefix // the prefix type of defSym, if a class member
- var qual: Tree = EmptyTree // the qualififier tree if transformed tree is a select
+ var qual: Tree = EmptyTree // the qualifier tree if transformed tree is a select
// A symbol qualifies if it exists and is not stale. Stale symbols
// are made to disappear here. In addition,
@@ -3570,6 +3629,7 @@ trait Typers { self: Analyzer =>
}
while (defSym == NoSymbol && cx != NoContext) {
+ currentRun.compileSourceFor(context.asInstanceOf[analyzer.Context], name)
pre = cx.enclClass.prefix
defEntry = cx.scope.lookupEntry(name)
if ((defEntry ne null) && qualifies(defEntry.sym)) {
@@ -3599,7 +3659,7 @@ trait Typers { self: Analyzer =>
if (defSym.exists && impSym.exists) {
// imported symbols take precedence over package-owned symbols in different
- // compilation units. Defined symbols take precedence over errenous imports.
+ // compilation units. Defined symbols take precedence over erroneous imports.
if (defSym.definedInPackage &&
(!currentRun.compiles(defSym) ||
(context.unit ne null) && defSym.sourceFile != context.unit.source.file))
@@ -3698,7 +3758,7 @@ trait Typers { self: Analyzer =>
//@M! the polytype denotes the expected kind
}
val argtypes = args1 map (_.tpe)
- val owntype = if (tpt1.symbol.isClass || tpt1.symbol.isTypeMember)
+ val owntype = if (tpt1.symbol.isClass || tpt1.symbol.isNonClassType)
// @M! added the latter condition
appliedType(tpt1.tpe, argtypes)
else tpt1.tpe.instantiateTypeParams(tparams, argtypes)
@@ -3734,6 +3794,8 @@ trait Typers { self: Analyzer =>
case PackageDef(pid, stats) =>
val pid1 = typedQualifier(pid).asInstanceOf[RefTree]
assert(sym.moduleClass ne NoSymbol, sym)
+ // complete lazy annotations
+ val annots = sym.annotations
val stats1 = newTyper(context.make(tree, sym.moduleClass, sym.info.decls))
.typedStats(stats, NoSymbol)
treeCopy.PackageDef(tree, pid1, stats1) setType NoType
@@ -3826,10 +3888,10 @@ trait Typers { self: Analyzer =>
val body = treeCopy.Match(tree, selector1, cases)
typed1(atPos(tree.pos) { Function(params, body) }, mode, pt)
} else {
- val selector1 = checkDead(typed(selector))
+ val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
var cases1 = typedCases(tree, cases, selector1.tpe.widen, pt)
- val owntype = ptOrLub(cases1 map (_.tpe))
- if (isNumericValueType(owntype)) {
+ val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe))
+ if (needAdapt) {
cases1 = cases1 map (adaptCase(_, owntype))
}
treeCopy.Match(tree, selector1, cases1) setType owntype
@@ -3843,15 +3905,15 @@ trait Typers { self: Analyzer =>
var catches1 = typedCases(tree, catches, ThrowableClass.tpe, pt)
val finalizer1 = if (finalizer.isEmpty) finalizer
else typed(finalizer, UnitClass.tpe)
- val owntype = ptOrLub(block1.tpe :: (catches1 map (_.tpe)))
- if (isNumericValueType(owntype)) {
+ val (owntype, needAdapt) = ptOrLub(block1.tpe :: (catches1 map (_.tpe)))
+ if (needAdapt) {
block1 = adapt(block1, mode, owntype)
catches1 = catches1 map (adaptCase(_, owntype))
}
treeCopy.Try(tree, block1, catches1, finalizer1) setType owntype
case Throw(expr) =>
- val expr1 = typed(expr, ThrowableClass.tpe)
+ val expr1 = typed(expr, EXPRmode | BYVALmode, ThrowableClass.tpe)
treeCopy.Throw(tree, expr1) setType NothingClass.tpe
case New(tpt: Tree) =>
@@ -3943,7 +4005,7 @@ trait Typers { self: Analyzer =>
error(tree.pos, "cannot create a generic multi-dimensional array of more than "+MaxArrayDims+" dimensions")
val newArrayApp = atPos(tree.pos) {
val manif = getManifestTree(tree.pos, manifType, false)
- Apply(Select(manif, if (level == 1) "newArray" else "newArray"+level), args)
+ new ApplyToImplicitArgs(Select(manif, if (level == 1) "newArray" else "newArray"+level), args)
}
typed(newArrayApp, mode, pt)
case tree1 =>
@@ -4007,7 +4069,7 @@ trait Typers { self: Analyzer =>
case Literal(value) =>
tree setType (
if (value.tag == UnitTag) UnitClass.tpe
- else mkConstantType(value))
+ else ConstantType(value))
case SingletonTypeTree(ref) =>
val ref1 = checkStable(
@@ -4028,7 +4090,7 @@ trait Typers { self: Analyzer =>
case TypeBoundsTree(lo, hi) =>
val lo1 = typedType(lo, mode)
val hi1 = typedType(hi, mode)
- treeCopy.TypeBoundsTree(tree, lo1, hi1) setType mkTypeBounds(lo1.tpe, hi1.tpe)
+ treeCopy.TypeBoundsTree(tree, lo1, hi1) setType TypeBounds(lo1.tpe, hi1.tpe)
case etpt @ ExistentialTypeTree(_, _) =>
newTyper(context.makeNewScope(tree, context.owner)).typedExistentialTypeTree(etpt, mode)
@@ -4045,7 +4107,7 @@ trait Typers { self: Analyzer =>
assert(onlyPresentation) // should not happen in normal circumstances.
tree setType tree.symbol.tpe
case _ =>
- throw new Error("unexpected tree: " + tree.getClass + "\n" + tree)//debug
+ abort("unexpected tree: " + tree.getClass + "\n" + tree)//debug
}
}
@@ -4096,7 +4158,6 @@ trait Typers { self: Analyzer =>
if (phase.id <= currentRun.typerPhase.id) signalDone(context.asInstanceOf[analyzer.Context], tree, result)
result
} catch {
- case ex: ControlException => throw ex
case ex: TypeError =>
tree.tpe = null
if (printTypings) println("caught "+ex+" in typed: "+tree);//DEBUG
@@ -4108,13 +4169,9 @@ trait Typers { self: Analyzer =>
if ((context ne null) && (context.unit ne null) &&
(context.unit.source ne null) && (tree ne null))
logError("AT: " + (tree.pos).dbgString, ex);
- throw(ex)
-/*
- case ex: java.lang.Error =>
- Console.println("exception when typing "+tree+", pt = "+pt)
throw ex
-*/ //debug
- } finally {
+ }
+ finally {
if (Statistics.enabled) {
val t = currentTime()
microsByType(pendingTreeTypes.head) += ((t - typerTime) / 1000).toInt
@@ -4154,14 +4211,17 @@ trait Typers { self: Analyzer =>
/** Types qualifier <code>tree</code> of a select node.
* E.g. if tree occurs in a context like <code>tree.m</code>.
- *
- * @param tree ...
- * @return ...
+ */
+ def typedQualifier(tree: Tree, mode: Int, pt: Type): Tree =
+ typed(tree, EXPRmode | QUALmode | POLYmode | mode & TYPEPATmode, pt) // TR: don't set BYVALmode, since qualifier might end up as by-name param to an implicit
+
+ /** Types qualifier <code>tree</code> of a select node.
+ * E.g. if tree occurs in a context like <code>tree.m</code>.
*/
def typedQualifier(tree: Tree, mode: Int): Tree =
- typed(tree, EXPRmode | QUALmode | POLYmode | mode & TYPEPATmode, WildcardType)
+ typedQualifier(tree, mode, WildcardType)
- def typedQualifier(tree: Tree): Tree = typedQualifier(tree, NOmode)
+ def typedQualifier(tree: Tree): Tree = typedQualifier(tree, NOmode, WildcardType)
/** Types function part of an application */
def typedOperator(tree: Tree): Tree =
@@ -4169,20 +4229,13 @@ trait Typers { self: Analyzer =>
/** Types a pattern with prototype <code>pt</code> */
def typedPattern(tree: Tree, pt: Type): Tree = {
- // The commented out code stems from investigation into whether
- // "abc" match { case Seq('a', 'b', 'c') => true }
- // can be ruled out statically. At present this is a runtime
- // error both because there is an implicit from String to Seq
- // (even though such implicits are not used by the matcher) and
- // because the typer is fine with concluding that "abc" might
- // be of type "String with Seq[T]" and thus eligible for a call
- // to unapplySeq.
- //
- // val savedImplicitsEnabled = context.implicitsEnabled
- // context.implicitsEnabled = false
- // try
- typed(tree, PATTERNmode, pt)
- // finally context.implicitsEnabled = savedImplicitsEnabled
+ // We disable implicits because otherwise some constructs will
+ // type check which should not. The pattern matcher does not
+ // perform implicit conversions in an attempt to consummate a match.
+ val savedImplicitsEnabled = context.implicitsEnabled
+ context.implicitsEnabled = false
+ try typed(tree, PATTERNmode, pt)
+ finally context.implicitsEnabled = savedImplicitsEnabled
}
/** Types a (fully parameterized) type tree */
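The rewritten typedPattern enables what the removed comment only speculated about: implicit views are switched off while a pattern is typed, because the pattern matcher never applies implicit conversions when attempting a match. The example cited in that comment shows what this guards against (illustrative snippet, not part of the patch):

    "abc" match { case Seq('a', 'b', 'c') => true }
    // Previously this type checked via the String-to-Seq implicit view even though it could
    // never match at runtime; with implicits disabled during pattern typing it should be rejected.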
@@ -4235,9 +4288,9 @@ trait Typers { self: Analyzer =>
packedType(tree1, context.owner)
}
- def transformedOrTyped(tree: Tree, pt: Type): Tree = transformed.get(tree) match {
+ def transformedOrTyped(tree: Tree, mode: Int, pt: Type): Tree = transformed.get(tree) match {
case Some(tree1) => transformed -= tree; tree1
- case None => typed(tree, pt)
+ case None => typed(tree, mode, pt)
}
def findManifest(tp: Type, full: Boolean) = atPhase(currentRun.typerPhase) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 3898e46c0b..a60721f0ca 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -171,10 +171,11 @@ trait Unapplies extends ast.TreeDSL
case _ => nme.unapply
}
val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), paramName, classType(cdef, tparams), EmptyTree))
+ val ifNull = if (constrParamss(cdef).head.size == 0) FALSE else REF(NoneModule)
+ val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef.symbol) }, ifNull)(Ident(paramName))
atPos(cdef.pos.focus)(
- DefDef(caseMods, method, tparams, List(cparams), TypeTree(),
- caseClassUnapplyReturnValue(paramName, cdef.symbol))
+ DefDef(caseMods, method, tparams, List(cparams), TypeTree(), body)
)
}
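With this change the synthesized case-class unapply is null-safe: a null scrutinee now yields None (or false for a case class without constructor parameters) instead of throwing. A hand-written equivalent of what the synthesizer roughly emits (names illustrative, not part of the patch):

    case class Foo(x: Int, y: String)
    def fooUnapply(p: Foo): Option[(Int, String)] =    // shape of Foo.unapply after this patch
      if (p == null) None else Some((p.x, p.y))

    case class Empty()
    def emptyUnapply(p: Empty): Boolean = p != null    // zero-parameter case: Boolean result, false on null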
diff --git a/src/compiler/scala/tools/nsc/util/ArgumentsExpander.scala b/src/compiler/scala/tools/nsc/util/ArgumentsExpander.scala
deleted file mode 100644
index 2d8fc8c502..0000000000
--- a/src/compiler/scala/tools/nsc/util/ArgumentsExpander.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-package scala.tools.nsc
-package util
-
-import java.io.{FileReader, BufferedReader, StreamTokenizer, FileNotFoundException}
-import scala.tools.nsc.io.AbstractFile
-import scala.collection.mutable.ListBuffer
-
-/**
- * Expands all arguments starting with @ to the contents of the
- * file named like each argument.
- */
-object ArgumentsExpander {
-
- def expandArg(arg: String): List[String] =
- expandFromFile(arg.substring(1))
-
- /*
- * Extracts all the arguments in a specified file.
- * Throws FileNotFoundException if the file does not exist.
- */
- private def expandFromFile(fileName: String): List[String] = {
- val f = AbstractFile.getFile(fileName)
- if (f eq null) throw new FileNotFoundException(
- "argument file "+ fileName +" could not be found")
-
- val in = new BufferedReader(new FileReader(f.file))
-
- val tokenizer = new StreamTokenizer( in )
- tokenizer.resetSyntax
- tokenizer.wordChars(' ', 255)
- tokenizer.whitespaceChars(0, ' ')
- tokenizer.commentChar('#')
- tokenizer.quoteChar('"')
- tokenizer.quoteChar('\'')
-
- val ts = new ListBuffer[String]
- while (tokenizer.nextToken() != StreamTokenizer.TT_EOF) {
- ts += tokenizer.sval
- }
- in.close()
- ts.toList
- }
-}
diff --git a/src/compiler/scala/tools/nsc/util/CharArrayReader.scala b/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
index a6c9edb8d7..39a104fb14 100644
--- a/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
+++ b/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
@@ -7,7 +7,7 @@
package scala.tools.nsc
package util
-import Chars.{LF, FF, CR, SU}
+import Chars._
abstract class CharArrayReader { self =>
@@ -35,7 +35,7 @@ abstract class CharArrayReader { self =>
/** Is last character a unicode escape \\uxxxx? */
def isUnicodeEscape = charOffset == lastUnicodeOffset
- /** Advance one character */
+ /** Advance one character; reducing CR;LF pairs to just LF */
final def nextChar() {
if (charOffset >= buf.length) {
ch = SU
@@ -44,8 +44,20 @@ abstract class CharArrayReader { self =>
ch = c
charOffset += 1
if (c == '\\') potentialUnicode()
+ else if (c < ' ') { skipCR(); potentialLineEnd() }
+ }
+ }
+
+ /** Advance one character, leaving CR;LF pairs intact */
+ final def nextRawChar() {
+ if (charOffset >= buf.length) {
+ ch = SU
+ } else {
+ val c = buf(charOffset)
+ ch = c
+ charOffset += 1
+ if (c == '\\') potentialUnicode()
else if (c < ' ') potentialLineEnd()
-// print("`"+ch+"'")
}
}
@@ -71,32 +83,23 @@ abstract class CharArrayReader { self =>
}
}
- /** Handle line ends, replace CR+LF by LF */
- private def potentialLineEnd() {
+ /** replace CR;LF by LF */
+ private def skipCR() {
if (ch == CR)
if (charOffset < buf.length && buf(charOffset) == LF) {
charOffset += 1
ch = LF
}
+ }
+
+ /** Handle line ends */
+ private def potentialLineEnd() {
if (ch == LF || ch == FF) {
lastLineStartOffset = lineStartOffset
lineStartOffset = charOffset
}
}
- /** Convert a character digit to an Int according to given base,
- * -1 if no success */
- def digit2int(ch: Char, base: Int): Int = {
- if ('0' <= ch && ch <= '9' && ch < '0' + base)
- ch - '0'
- else if ('A' <= ch && ch < 'A' + base - 10)
- ch - 'A' + 10
- else if ('a' <= ch && ch < 'a' + base - 10)
- ch - 'a' + 10
- else
- -1
- }
-
/** A new reader that takes off at the current character position */
def lookaheadReader = new CharArrayReader {
val buf = self.buf
diff --git a/src/compiler/scala/tools/nsc/util/Chars.scala b/src/compiler/scala/tools/nsc/util/Chars.scala
index ce02b67633..5a64f36eb4 100755
--- a/src/compiler/scala/tools/nsc/util/Chars.scala
+++ b/src/compiler/scala/tools/nsc/util/Chars.scala
@@ -10,7 +10,6 @@ import annotation.{ tailrec, switch }
/** Contains constants and classifier methods for characters */
object Chars {
-
// Be very careful touching these.
// Apparently trivial changes to the way you write these constants
// will cause Scanners.scala to go from a nice efficient switch to
@@ -22,6 +21,19 @@ object Chars {
final val CR = '\u000D'
final val SU = '\u001A'
+ /** Convert a character digit to an Int according to given base,
+ * -1 if no success */
+ def digit2int(ch: Char, base: Int): Int = {
+ if ('0' <= ch && ch <= '9' && ch < '0' + base)
+ ch - '0'
+ else if ('A' <= ch && ch < 'A' + base - 10)
+ ch - 'A' + 10
+ else if ('a' <= ch && ch < 'a' + base - 10)
+ ch - 'a' + 10
+ else
+ -1
+ }
+
/** Is character a line break? */
@inline def isLineBreakChar(c: Char) = (c: @switch) match {
case LF|FF|CR|SU => true
@@ -38,16 +50,11 @@ object Chars {
/** Can character start an alphanumeric Scala identifier? */
def isIdentifierStart(c: Char): Boolean =
- ('A' <= c && c <= 'Z') ||
- ('a' <= c && c <= 'a') ||
- (c == '_') || (c == '$') ||
- Character.isUnicodeIdentifierStart(c)
+ (c == '_') || (c == '$') || Character.isUnicodeIdentifierStart(c)
/** Can character form part of an alphanumeric Scala identifier? */
def isIdentifierPart(c: Char) =
- isIdentifierStart(c) ||
- ('0' <= c && c <= '9') ||
- Character.isUnicodeIdentifierPart(c)
+ (c == '$') || Character.isUnicodeIdentifierPart(c)
/** Is character a math or other symbol in Unicode? */
def isSpecial(c: Char) = {
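digit2int, previously duplicated in the two character readers (its removals appear elsewhere in this patch), now has a single home in Chars. Its contract in short (hypothetical REPL-style usage):

    import scala.tools.nsc.util.Chars.digit2int
    digit2int('7', 8)     // 7
    digit2int('f', 16)    // 15
    digit2int('9', 8)     // -1, since '9' is not a valid digit in base 8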
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index a6b0a1244d..c35b7139c6 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -8,16 +8,15 @@
package scala.tools.nsc
package util
-import java.io.File
+import java.io.{ File => JFile }
import java.net.URL
-import java.util.StringTokenizer
-import scala.util.Sorting
import scala.collection.mutable.{ListBuffer, ArrayBuffer, HashSet => MutHashSet}
-import scala.tools.nsc.io.AbstractFile
-
-import ch.epfl.lamp.compiler.msil.{Type => MSILType, Assembly}
-
+import io.{ File, Directory, Path, AbstractFile }
+import scala.tools.util.StringOps.splitWhere
+import Path.isJarOrZip
+import scala.tools.util.PathResolver
+import File.pathSeparator
/** <p>
* This module provides star expansion of '-classpath' option arguments, behaves the same as
@@ -29,255 +28,322 @@ import ch.epfl.lamp.compiler.msil.{Type => MSILType, Assembly}
object ClassPath {
/** Expand single path entry */
private def expandS(pattern: String): List[String] = {
- def isJar(name: String) = name.toLowerCase endsWith ".jar"
-
- /** Get all jars in directory */
- def lsJars(f: File, filt: String => Boolean = _ => true) = {
- val list = f.listFiles()
- if (list eq null) Nil
- else list.filter(f => f.isFile() && filt(f.getName) && isJar(f.getName())).map(_.getPath()).toList
- }
+ val wildSuffix = File.separator + "*"
- val suffix = File.separator + "*"
+ /** Get all subdirectories, jars, zips out of a directory. */
+ def lsDir(dir: Directory, filt: String => Boolean = _ => true) =
+ dir.list filter (x => filt(x.name) && (x.isDirectory || isJarOrZip(x))) map (_.path) toList
def basedir(s: String) =
if (s contains File.separator) s.substring(0, s.lastIndexOf(File.separator))
else "."
- if (pattern == "*") lsJars(new File("."))
- else if (pattern endsWith suffix) lsJars(new File(pattern dropRight 2))
+ if (pattern == "*") lsDir(Directory("."))
+ else if (pattern endsWith wildSuffix) lsDir(Directory(pattern dropRight 2))
else if (pattern contains '*') {
val regexp = ("^%s$" format pattern.replaceAll("""\*""", """.*""")).r
- lsJars(new File(basedir(pattern)), regexp findFirstIn _ isDefined)
+ lsDir(Directory(pattern).parent, regexp findFirstIn _ isDefined)
}
else List(pattern)
}
- /** Split path using platform-dependent path separator */
- private def splitPath(path: String): List[String] =
- path split File.pathSeparator toList
+ /** Return duplicated classpath entries as
+ * (name, list of origins)
+ * in the order they occur on the path.
+ */
+ def findDuplicates(cp: ClassPath[_]) = {
+ def toFullName(x: (String, _, cp.AnyClassRep)) = x._1 + "." + x._3.name
+ def toOriginString(x: ClassPath[_]) = x.origin getOrElse x.name
+
+ /** Flatten everything into tuples, recombine grouped by name, filter down to 2+ entries. */
+ val flattened = (
+ for ((pkgName, pkg) <- cp.allPackagesWithNames ; clazz <- pkg.classes) yield
+ (pkgName, pkg, clazz)
+ )
+ val multipleAppearingEntries = flattened groupBy toFullName filter (_._2.size > 1)
+
+ /** Extract results. */
+ for (name <- flattened map toFullName distinct ; dups <- multipleAppearingEntries get name) yield
+ (name, dups map { case (_, cp, _) => toOriginString(cp) })
+ }
+
+ /** Split classpath using platform-dependent path separator */
+ def split(path: String): List[String] = (path split pathSeparator).toList filterNot (_ == "") distinct
+
+ /** Join classpath using platform-dependent path separator */
+ def join(paths: String*): String = paths filterNot (_ == "") mkString pathSeparator
+
+ /** Split the classpath, apply a transformation function, and reassemble it. */
+ def map(cp: String, f: String => String): String = join(split(cp) map f: _*)
+
+ /** Split the classpath, filter according to predicate, and reassemble. */
+ def filter(cp: String, p: String => Boolean): String = join(split(cp) filter p: _*)
+
+ /** Split the classpath and map them into Paths */
+ def toPaths(cp: String): List[Path] = split(cp) map (x => Path(x).toAbsolute)
+
+ /** Join the paths as a classpath */
+ def fromPaths(paths: Path*): String = join(paths map (_.path): _*)
+
+ /** Split the classpath and map them into URLs */
+ def toURLs(cp: String): List[URL] = toPaths(cp) map (_.toURL)
/** Expand path and possibly expanding stars */
def expandPath(path: String, expandStar: Boolean = true): List[String] =
- if (expandStar) splitPath(path).flatMap(expandS(_))
- else splitPath(path)
-
+ if (expandStar) split(path) flatMap expandS
+ else split(path)
- def validPackage(name: String) =
- !(name.equals("META-INF") || name.startsWith("."))
+ /** Expand dir out to contents, a la extdir */
+ def expandDir(extdir: String): List[String] = {
+ val dir = Option(AbstractFile getDirectory extdir) getOrElse (return Nil)
+ dir filter (_.isClassContainer) map (dir.sfile.get / _.name path) toList
+ }
- def validSourceFile(name: String) =
- (name.endsWith(".scala") || name.endsWith(".java"))
+ /** A useful name filter. */
+ def isTraitImplementation(name: String) = name endsWith "$class.class"
- var XO = false
- def validClassFile(name: String) =
- if (name.endsWith(".class")) {
- val className = name.substring(0, name.length - 6)
- (!className.endsWith("$class") || XO)
- } else false
+ import java.net.MalformedURLException
+ def specToURL(spec: String): Option[URL] =
+ try Some(new URL(spec))
+ catch { case _: MalformedURLException => None }
+ /** A class modeling aspects of a ClassPath which should be
+ * propagated to any classpaths it creates.
+ */
+ abstract class ClassPathContext[T] {
+ /** A filter which can be used to exclude entities from the classpath
+ * based on their name.
+ */
+ def isValidName(name: String): Boolean = true
+
+ /** From the representation to its identifier.
+ */
+ def toBinaryName(rep: T): String
+
+ /** Create a new classpath based on the abstract file.
+ */
+ def newClassPath(file: AbstractFile): ClassPath[T]
+
+ /** Creators for sub classpaths which preserve this context.
+ */
+ def sourcesInPath(path: String): List[ClassPath[T]] =
+ for (file <- expandPath(path, false) ; dir <- Option(AbstractFile getDirectory file)) yield
+ new SourcePath[T](dir, this)
+
+ def contentsOfDirsInPath(path: String): List[ClassPath[T]] =
+ for (dir <- expandPath(path, false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield
+ newClassPath(entry)
+
+ def classesAtAllURLS(path: String): List[ClassPath[T]] =
+ (path split " ").toList flatMap classesAtURL
+
+ def classesAtURL(spec: String) =
+ for (url <- specToURL(spec).toList ; location <- Option(AbstractFile getURL url)) yield
+ newClassPath(location)
+
+ def classesInExpandedPath(path: String) = classesInPathImpl(path, true)
+ def classesInPath(path: String) = classesInPathImpl(path, false)
+
+ // Internal
+ private def classesInPathImpl(path: String, expand: Boolean) =
+ for (file <- expandPath(path, expand) ; dir <- Option(AbstractFile getDirectory file)) yield
+ newClassPath(dir)
+ }
- def collectTypes(assemFile: AbstractFile) = {
- var res: Array[MSILType] = MSILType.EmptyTypes
- val assem = Assembly.LoadFrom(assemFile.path)
- if (assem != null) {
- // DeclaringType == null: true for non-inner classes
- res = assem.GetTypes().filter((typ: MSILType) => typ.DeclaringType == null)
- Sorting.stableSort(res, (t1: MSILType, t2: MSILType) => (t1.FullName compareTo t2.FullName) < 0)
+ class JavaContext extends ClassPathContext[AbstractFile] {
+ def toBinaryName(rep: AbstractFile) = {
+ assert(rep.name endsWith ".class", rep.name)
+ rep.name dropRight 6
}
- res
+ def newClassPath(dir: AbstractFile) = new DirectoryClassPath(dir, this)
}
-}
-/**
- * Represents classes which can be loaded with a ClassfileLoader/MSILTypeLoader
- * and / or a SourcefileLoader.
- */
-case class ClassRep[T](binary: Option[T], source: Option[AbstractFile]) {
- def name = {
- if (binary.isDefined) binary.get match {
- case f: AbstractFile =>
- assert(f.name.endsWith(".class"), f.name)
- f.name.substring(0, f.name.length - 6)
- case t: MSILType =>
- t.Name
- case c =>
- throw new FatalError("Unexpected binary class representation: "+ c)
- } else {
- assert(source.isDefined)
- val nme = source.get.name
- if (nme.endsWith(".scala"))
- nme.substring(0, nme.length - 6)
- else if (nme.endsWith(".java"))
- nme.substring(0, nme.length - 5)
- else
- throw new FatalError("Unexpected source file ending: "+ nme)
- }
+ object DefaultJavaContext extends JavaContext {
+ override def isValidName(name: String) = !isTraitImplementation(name)
+ }
+
+ /** From the source file to its identifier.
+ */
+ def toSourceName(f: AbstractFile): String = {
+ val nme = f.name
+ if (nme.endsWith(".scala"))
+ nme dropRight 6
+ else if (nme.endsWith(".java"))
+ nme dropRight 5
+ else
+ throw new FatalError("Unexpected source file ending: " + nme)
}
}
+import ClassPath._
/**
* Represents a package which contains classes and other packages
*/
abstract class ClassPath[T] {
+ type AnyClassRep = ClassPath[T]#ClassRep
+
/**
* The short name of the package (without prefix)
*/
def name: String
- val classes: List[ClassRep[T]]
- val packages: List[ClassPath[T]]
- val sourcepaths: List[AbstractFile]
/**
- * Find a ClassRep given a class name of the form "package.subpackage.ClassName".
- * Does not support nested classes on .NET
+ * A String representing the origin of this classpath element, if known.
+ * For example, the path of the directory or jar.
*/
- def findClass(name: String): Option[ClassRep[T]] = {
- val i = name.indexOf('.')
- if (i < 0) {
- classes.find(c => c.name == name)
- } else {
- val pkg = name.substring(0, i)
- val rest = name.substring(i + 1, name.length)
- packages.find(p => p.name == pkg).flatMap(_.findClass(rest))
- }
+ def origin: Option[String] = None
+
+ /** A list of URLs representing this classpath.
+ */
+ def asURLs: List[URL]
+
+ /** The whole classpath in the form of one String.
+ */
+ def asClasspathString: String
+
+ /** Info which should be propagated to any sub-classpaths.
+ */
+ def context: ClassPathContext[T]
+
+ /** Lists of entities.
+ */
+ def classes: List[AnyClassRep]
+ def packages: List[ClassPath[T]]
+ def sourcepaths: List[AbstractFile]
+
+ /** Information which entails walking the tree. This is probably only
+ * necessary for tracking down problems - it's normally not used.
+ */
+ def allPackages: List[ClassPath[T]] = packages ::: (packages flatMap (_.allPackages))
+ def allPackageNames: List[String] = {
+ def subpackages(prefix: String, cp: ClassPath[T]): List[String] = (
+ (cp.packages map (prefix + _.name)) :::
+ (cp.packages flatMap (x => subpackages(prefix + x.name + ".", x)))
+ )
+ subpackages("", this)
}
-}
+ def allPackagesWithNames: List[(String, ClassPath[T])] = {
+ val root = packages map (p => p.name -> p)
+ val subs =
+ for ((prefix, p) <- root ; (k, v) <- p.allPackagesWithNames) yield
+ (prefix + "." + k, v)
-/**
- * A Classpath containing source files
- */
-class SourcePath[T](dir: AbstractFile) extends ClassPath[T] {
- def name = dir.name
+ root ::: subs
+ }
- lazy val classes = {
- val cls = new ListBuffer[ClassRep[T]]
- for (f <- dir.iterator) {
- if (!f.isDirectory && ClassPath.validSourceFile(f.name))
- cls += ClassRep[T](None, Some(f))
+ /**
+ * Represents classes which can be loaded with a ClassfileLoader/MSILTypeLoader
+ * and / or a SourcefileLoader.
+ */
+ case class ClassRep(binary: Option[T], source: Option[AbstractFile]) {
+ def name: String = binary match {
+ case Some(x) => context.toBinaryName(x)
+ case _ =>
+ assert(source.isDefined)
+ toSourceName(source.get)
}
- cls.toList
}
- lazy val packages = {
- val pkg = new ListBuffer[SourcePath[T]]
- for (f <- dir.iterator) {
- if (f.isDirectory && ClassPath.validPackage(f.name))
- pkg += new SourcePath[T](f)
+ /** Filters for assessing validity of various entities.
+ */
+ def validClassFile(name: String) = (name endsWith ".class") && context.isValidName(name)
+ def validPackage(name: String) = (name != "META-INF") && (name != "") && (name.head != '.')
+ def validSourceFile(name: String) = validSourceExtensions exists (name endsWith _)
+ def validSourceExtensions = List(".scala", ".java")
+
+ /**
+ * Find a ClassRep given a class name of the form "package.subpackage.ClassName".
+ * Does not support nested classes on .NET
+ */
+ def findClass(name: String): Option[AnyClassRep] =
+ splitWhere(name, _ == '.', true) match {
+ case Some((pkg, rest)) =>
+ val rep = packages find (_.name == pkg) flatMap (_ findClass rest)
+ rep map {
+ case x: ClassRep => x
+ case x => throw new FatalError("Unexpected ClassRep '%s' found searching for name '%s'".format(x, name))
+ }
+ case _ =>
+ classes find (_.name == name)
}
- pkg.toList
- }
- val sourcepaths: List[AbstractFile] = List(dir)
+ def findSourceFile(name: String): Option[AbstractFile] =
+ findClass(name) match {
+ case Some(ClassRep(Some(x: AbstractFile), _)) => Some(x)
+ case _ => None
+ }
- override def toString() = "sourcepath: "+ dir.toString()
+ def sortString = asURLs map (_.toString) sorted
+ override def equals(that: Any) = that match {
+ case x: ClassPath[_] => this.sortString == x.sortString
+ case _ => false
+ }
+ override def hashCode = sortString.hashCode
}
/**
- * A directory (or a .jar file) containing classfiles and packages
+ * A Classpath containing source files
*/
-class DirectoryClassPath(dir: AbstractFile) extends ClassPath[AbstractFile] {
+class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends ClassPath[T] {
def name = dir.name
+ override def origin = dir.underlyingSource map (_.path)
+ def asURLs = dir.sfile.toList map (_.toURL)
+ def asClasspathString = dir.path
+ val sourcepaths: List[AbstractFile] = List(dir)
- lazy val classes = {
- val cls = new ListBuffer[ClassRep[AbstractFile]]
- for (f <- dir.iterator) {
- if (!f.isDirectory && ClassPath.validClassFile(f.name))
- cls += ClassRep(Some(f), None)
- }
- cls.toList
- }
+ lazy val classes: List[ClassRep] = dir collect {
+ case f if !f.isDirectory && validSourceFile(f.name) => ClassRep(None, Some(f))
+ } toList
- lazy val packages = {
- val pkg = new ListBuffer[DirectoryClassPath]
- for (f <- dir.iterator) {
- if (f.isDirectory && ClassPath.validPackage(f.name))
- pkg += new DirectoryClassPath(f)
- }
- pkg.toList
- }
+ lazy val packages: List[SourcePath[T]] = dir collect {
+ case f if f.isDirectory && validPackage(f.name) => new SourcePath[T](f, context)
+ } toList
- val sourcepaths: List[AbstractFile] = Nil
- override def toString() = "directory classpath: "+ dir.toString()
+ override def toString() = "sourcepath: "+ dir.toString()
}
-
-
/**
- * A assembly file (dll / exe) containing classes and namespaces
+ * A directory (or a .jar file) containing classfiles and packages
*/
-class AssemblyClassPath(types: Array[MSILType], namespace: String) extends ClassPath[MSILType] {
- def name = {
- val i = namespace.lastIndexOf('.')
- if (i < 0) namespace
- else namespace.substring(i + 1, namespace.length)
- }
-
- def this(assemFile: AbstractFile) {
- this(ClassPath.collectTypes(assemFile), "")
- }
-
- private lazy val first: Int = {
- var m = 0
- var n = types.length - 1
- while (m < n) {
- val l = (m + n) / 2
- val res = types(l).FullName.compareTo(namespace)
- if (res < 0) m = l + 1
- else n = l
- }
- if (types(m).FullName.startsWith(namespace)) m else types.length
- }
+class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[AbstractFile]) extends ClassPath[AbstractFile] {
+ def name = dir.name
+ override def origin = dir.underlyingSource map (_.path)
+ def asURLs = dir.sfile.toList map (_.toURL)
+ def asClasspathString = dir.path
+ val sourcepaths: List[AbstractFile] = Nil
- lazy val classes = {
- val cls = new ListBuffer[ClassRep[MSILType]]
- var i = first
- while (i < types.length && types(i).Namespace.startsWith(namespace)) {
- // CLRTypes used to exclude java.lang.Object and java.lang.String (no idea why..)
- if (types(i).Namespace == namespace)
- cls += ClassRep(Some(types(i)), None)
- i += 1
- }
- cls.toList
- }
+ lazy val classes: List[ClassRep] = dir collect {
+ case f if !f.isDirectory && validClassFile(f.name) => ClassRep(Some(f), None)
+ } toList
- lazy val packages = {
- val nsSet = new MutHashSet[String]
- var i = first
- while (i < types.length && types(i).Namespace.startsWith(namespace)) {
- val subns = types(i).Namespace
- if (subns.length > namespace.length) {
- // example: namespace = "System", subns = "System.Reflection.Emit"
- // => find second "." and "System.Reflection" to nsSet.
- val end = subns.indexOf('.', namespace.length + 1)
- nsSet += (if (end < 0) subns
- else subns.substring(0, end))
- }
- i += 1
- }
- for (ns <- nsSet.toList)
- yield new AssemblyClassPath(types, ns)
- }
+ lazy val packages: List[DirectoryClassPath] = dir collect {
+ case f if f.isDirectory && validPackage(f.name) => new DirectoryClassPath(f, context)
+ } toList
- val sourcepaths: List[AbstractFile] = Nil
- override def toString() = "assembly classpath "+ namespace
+ override def toString() = "directory classpath: "+ dir.toString()
}
/**
* A classpath unifying multiple class- and sourcepath entries.
*/
-abstract class MergedClassPath[T] extends ClassPath[T] {
- protected val entries: List[ClassPath[T]]
-
+class MergedClassPath[T](
+ val entries: List[ClassPath[T]],
+ val context: ClassPathContext[T])
+extends ClassPath[T] {
def name = entries.head.name
+ def asURLs = entries flatMap (_.asURLs)
+ lazy val sourcepaths: List[AbstractFile] = entries flatMap (_.sourcepaths)
+
+ override def origin = Some(entries map (x => x.origin getOrElse x.name) mkString ("Merged(", ", ", ")"))
+ override def asClasspathString: String = join(entries map (_.asClasspathString) : _*)
- lazy val classes: List[ClassRep[T]] = {
- val cls = new ListBuffer[ClassRep[T]]
+ lazy val classes: List[AnyClassRep] = {
+ val cls = new ListBuffer[AnyClassRep]
for (e <- entries; c <- e.classes) {
val name = c.name
- val idx = cls.indexWhere(cl => cl.name == name)
+ val idx = cls.indexWhere(_.name == name)
if (idx >= 0) {
val existing = cls(idx)
if (existing.binary.isEmpty && c.binary.isDefined)
@@ -295,7 +361,7 @@ abstract class MergedClassPath[T] extends ClassPath[T] {
val pkg = new ListBuffer[ClassPath[T]]
for (e <- entries; p <- e.packages) {
val name = p.name
- val idx = pkg.indexWhere(pk => pk.name == name)
+ val idx = pkg.indexWhere(_.name == name)
if (idx >= 0) {
pkg(idx) = addPackage(pkg(idx), p)
} else {
@@ -305,18 +371,46 @@ abstract class MergedClassPath[T] extends ClassPath[T] {
pkg.toList
}
- lazy val sourcepaths: List[AbstractFile] = entries.flatMap(_.sourcepaths)
+ private def addPackage(to: ClassPath[T], pkg: ClassPath[T]) = {
+ val newEntries = to match {
+ case cp: MergedClassPath[_] => cp.entries :+ pkg
+ case _ => List(to, pkg)
+ }
+ new MergedClassPath[T](newEntries, context)
+ }
- private def addPackage(to: ClassPath[T], pkg: ClassPath[T]) = to match {
- case cp: MergedClassPath[_] =>
- newMergedClassPath(cp.entries ::: List(pkg))
- case _ =>
- newMergedClassPath(List(to, pkg))
+ override def allPackages: List[ClassPath[T]] = entries flatMap (_.allPackages)
+ override def allPackageNames = entries flatMap (_.allPackageNames)
+ override def allPackagesWithNames = entries flatMap (_.allPackagesWithNames)
+
+ def duplicatedClasses = {
+ def toFullName(x: (String, _, AnyClassRep)) = x._1 + "." + x._3.name
+
+ /** Flatten everything into tuples, recombine grouped by name, filter down to 2+ entries. */
+ val flattened = (
+ for ((pkgName, pkg) <- allPackagesWithNames ; clazz <- pkg.classes) yield
+ (pkgName, pkg, clazz)
+ )
+ val multipleAppearingEntries = flattened groupBy toFullName filter (_._2.size > 1)
+
+ /** Using original name list as reference point, return duplicated entries as
+ * (name, list of origins)
+ * in the order they occur on the path.
+ */
+ for (name <- flattened map toFullName distinct ; dups <- multipleAppearingEntries get name) yield
+ (name, dups map {
+ case (_, cp, _) if cp.origin.isDefined => cp.origin.get
+ case (_, cp, _) => cp.asURLs.mkString
+ })
}
- private def newMergedClassPath(entrs: List[ClassPath[T]]): MergedClassPath[T] =
- new MergedClassPath[T] {
- protected val entries = entrs
+ def show {
+ println("ClassPath %s has %d entries and results in:\n".format(name, entries.size))
+ asClasspathString split ':' foreach (x => println(" " + x))
+ }
+ def showDuplicates =
+ ClassPath findDuplicates this foreach {
+ case (name, xs) => println(xs.mkString(name + ":\n ", "\n ", "\n"))
}
override def toString() = "merged classpath "+ entries.mkString("(", "\n", ")")
@@ -326,118 +420,8 @@ abstract class MergedClassPath[T] extends ClassPath[T] {
* The classpath when compiling with target:jvm. Binary files (classfiles) are represented
* as AbstractFile. nsc.io.ZipArchive is used to view zip/jar archives as directories.
*/
-class JavaClassPath(boot: String, ext: String, user: String, source: String, Xcodebase: String)
-extends MergedClassPath[AbstractFile] {
-
- protected val entries: List[ClassPath[AbstractFile]] = assembleEntries()
- private def assembleEntries(): List[ClassPath[AbstractFile]] = {
- import ClassPath._
- val etr = new ListBuffer[ClassPath[AbstractFile]]
-
- def addFilesInPath(path: String, expand: Boolean,
- ctr: AbstractFile => ClassPath[AbstractFile] = x => new DirectoryClassPath(x)) {
- for (fileName <- expandPath(path, expandStar = expand)) {
- val file = AbstractFile.getDirectory(fileName)
- if (file ne null) etr += ctr(file)
- }
- }
-
- // 1. Boot classpath
- addFilesInPath(boot, false)
-
- // 2. Ext classpath
- for (fileName <- expandPath(ext, expandStar = false)) {
- val dir = AbstractFile.getDirectory(fileName)
- if (dir ne null) {
- for (file <- dir) {
- val name = file.name.toLowerCase
- if (name.endsWith(".jar") || name.endsWith(".zip") || file.isDirectory) {
- val archive = AbstractFile.getDirectory(new File(dir.file, name))
- if (archive ne null) etr += new DirectoryClassPath(archive)
- }
- }
- }
- }
-
- // 3. User classpath
- addFilesInPath(user, true)
-
- // 4. Codebase entries (URLs)
- {
- val urlSeparator = " "
- val urlStrtok = new StringTokenizer(Xcodebase, urlSeparator)
- while (urlStrtok.hasMoreTokens()) try {
- val url = new URL(urlStrtok.nextToken())
- val archive = AbstractFile.getURL(url)
- if (archive ne null) etr += new DirectoryClassPath(archive)
- }
- catch {
- case e =>
- Console.println("error adding classpath form URL: " + e.getMessage)//debug
- throw e
- }
- }
-
- // 5. Source path
- if (source != "")
- addFilesInPath(source, false, x => new SourcePath[AbstractFile](x))
-
- etr.toList
- }
-}
-
-/**
- * The classpath when compiling with target:msil. Binary files are represented as
- * MSILType values.
- */
-class MsilClassPath(ext: String, user: String, source: String) extends MergedClassPath[MSILType] {
- protected val entries: List[ClassPath[MSILType]] = assembleEntries()
-
- private def assembleEntries(): List[ClassPath[MSILType]] = {
- import ClassPath._
- val etr = new ListBuffer[ClassPath[MSILType]]
- val names = new MutHashSet[String]
-
- // 1. Assemblies from -Xassem-extdirs
- for (dirName <- expandPath(ext, expandStar = false)) {
- val dir = AbstractFile.getDirectory(dirName)
- if (dir ne null) {
- for (file <- dir) {
- val name = file.name.toLowerCase
- if (name.endsWith(".dll") || name.endsWith(".exe")) {
- names += name
- etr += new AssemblyClassPath(file)
- }
- }
- }
- }
-
- // 2. Assemblies from -Xassem-path
- for (fileName <- expandPath(user, expandStar = false)) {
- val file = AbstractFile.getFile(fileName)
- if (file ne null) {
- val name = file.name.toLowerCase
- if (name.endsWith(".dll") || name.endsWith(".exe")) {
- names += name
- etr += new AssemblyClassPath(file)
- }
- }
- }
-
- def check(n: String) {
- if (!names.contains(n))
- throw new AssertionError("Cannot find assembly "+ n +
- ". Use -Xassem-extdirs or -Xassem-path to specify its location")
- }
- check("mscorlib.dll")
- check("scalaruntime.dll")
-
- // 3. Source path
- for (dirName <- expandPath(source, expandStar = false)) {
- val file = AbstractFile.getDirectory(dirName)
- if (file ne null) etr += new SourcePath[MSILType](file)
- }
-
- etr.toList
- }
+class JavaClassPath(
+ containers: List[ClassPath[AbstractFile]],
+ context: JavaContext)
+extends MergedClassPath[AbstractFile](containers, context) {
}
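The rewritten ClassPath object adds small string-level helpers (split, join, map, filter, toPaths, toURLs) that the rest of the path handling builds on. Hypothetical usage, assuming ':' as the platform path separator:

    import scala.tools.nsc.util.ClassPath
    val cp = "out/classes::lib/a.jar:lib/a.jar"
    ClassPath split cp                               // List(out/classes, lib/a.jar) -- empties and duplicates dropped
    ClassPath.join("out/classes", "", "lib/a.jar")   // "out/classes:lib/a.jar"
    ClassPath.map(cp, "/opt/" + _)                   // every entry rewritten, then rejoined with ':'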
diff --git a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
new file mode 100644
index 0000000000..869c6a97e9
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
@@ -0,0 +1,142 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package util
+
+import scala.util.parsing.combinator._
+import scala.util.parsing.input.{ Reader }
+import scala.util.parsing.input.CharArrayReader.EofCh
+import scala.collection.mutable.ListBuffer
+
+/** A simple command line parser to replace the several different
+ * simple ones spread around trunk.
+ */
+
+trait ParserUtil extends Parsers {
+ class ParserPlus[+T](underlying: Parser[T]) {
+ def !~>[U](p: => Parser[U]): Parser[U] = (underlying ~! p) ^^ { case a~b => b }
+ def <~![U](p: => Parser[U]): Parser[T] = (underlying ~! p) ^^ { case a~b => a }
+ }
+ protected implicit def parser2parserPlus[T](p: Parser[T]): ParserPlus[T] = new ParserPlus(p)
+}
+
+case class CommandLine(
+ args: List[String],
+ unaryOptions: List[String],
+ binaryOptions: List[String]
+) {
+ def this(args: List[String]) = this(args, Nil, Nil)
+ def this(args: Array[String]) = this(args.toList, Nil, Nil)
+ def this(line: String) = this(CommandLineParser tokenize line, Nil, Nil)
+
+ def withUnary(xs: List[String]) = copy(unaryOptions = xs)
+ def withBinary(xs: List[String]) = copy(binaryOptions = xs)
+
+ def allOptions = unaryOptions ++ binaryOptions
+ def originalArgs = args
+ def assumeBinary = true
+ def enforceArity = true
+ def onlyKnownOptions = false
+
+ val Terminator = "--"
+ val ValueForUnaryOption = "true" // so if --opt is given, x(--opt) = true
+
+ def mapForUnary(opt: String) = Map(opt -> ValueForUnaryOption)
+ def errorFn(msg: String) = println(msg)
+
+ /** argMap is option -> argument (or "" if it is a unary argument)
+ * residualArgs are what is left after removing the options and their args.
+ */
+ lazy val (argMap, residualArgs) = {
+ val residualBuffer = new ListBuffer[String]
+
+ def stripQuotes(s: String) = {
+ def isQuotedBy(c: Char) = s.length > 0 && s.head == c && s.last == c
+ if (List('"', '\'') exists isQuotedBy) s.tail.init else s
+ }
+
+ def isValidOption(s: String) = !onlyKnownOptions || (unaryOptions contains s) || (binaryOptions contains s)
+ def isOption(s: String) = (s startsWith "-") && (isValidOption(s) || { unknownOption(s) ; false })
+ def isUnary(s: String) = isOption(s) && (unaryOptions contains s)
+ def isBinary(s: String) = isOption(s) && !isUnary(s) && (assumeBinary || (binaryOptions contains s))
+
+ def unknownOption(opt: String) =
+ errorFn("Option '%s' not recognized.".format(opt))
+ def missingArg(opt: String, what: String) =
+ errorFn("Option '%s' requires argument, found %s instead.".format(opt, what))
+
+ def loop(args: List[String]): Map[String, String] = {
+ def residual(xs: List[String]) = { residualBuffer ++= xs ; Map[String, String]() }
+ if (args.isEmpty) return Map()
+ val hd :: rest = args
+ if (rest.isEmpty) {
+ if (isBinary(hd) && enforceArity)
+ missingArg(hd, "EOF")
+
+ if (isOption(hd)) mapForUnary(hd) else residual(args)
+ }
+ else
+ if (hd == Terminator) residual(rest)
+ else {
+ val hd1 :: hd2 :: rest = args
+
+ if (hd2 == Terminator) mapForUnary(hd1) ++ residual(rest)
+ else if (isUnary(hd1)) mapForUnary(hd1) ++ loop(hd2 :: rest)
+ else if (isBinary(hd1)) {
+ // Disabling this check so
+ // --scalacopts "-verbose" works. We can't tell if it's quoted,
+ // the shell does us in.
+ //
+ // if (isOption(hd2) && enforceArity)
+ // missingArg(hd1, hd2)
+
+ Map(hd1 -> hd2) ++ loop(rest)
+ }
+ else { residual(List(hd1)) ++ loop(hd2 :: rest) }
+ }
+ }
+
+ (loop(args), residualBuffer map stripQuotes toList)
+ }
+
+ def isSet(arg: String) = args contains arg
+ def get(arg: String) = argMap get arg
+ def getOrElse(arg: String, orElse: => String) = if (isSet(arg)) apply(arg) else orElse
+ def apply(arg: String) = argMap(arg)
+
+ override def toString() = "CommandLine(\n%s)\n" format (args map (" " + _ + "\n") mkString)
+}
+
+object CommandLineParser extends RegexParsers with ParserUtil {
+ override def skipWhitespace = false
+
+ def elemExcept(xs: Elem*): Parser[Elem] = elem("elemExcept", x => x != EofCh && !(xs contains x))
+ def elemOf(xs: Elem*): Parser[Elem] = elem("elemOf", xs contains _)
+ def escaped(ch: Char): Parser[String] = "\\" + ch
+ def mkQuoted(ch: Char): Parser[String] = (
+ elem(ch) !~> rep(escaped(ch) | elemExcept(ch)) <~ ch ^^ (_.mkString)
+ | failure("Unmatched %s in input." format ch)
+ )
+
+ /** Apparently windows can't deal with the quotes sticking around. */
+ lazy val squoted: Parser[String] = mkQuoted('\'') // ^^ (x => "'%s'" format x)
+ lazy val dquoted: Parser[String] = mkQuoted('"') // ^^ (x => "\"" + x + "\"")
+ lazy val token: Parser[String] = """\S+""".r
+
+ lazy val argument: Parser[String] = squoted | dquoted | token
+ lazy val commandLine: Parser[List[String]] = phrase(repsep(argument, whiteSpace))
+
+ class ParseException(msg: String) extends RuntimeException(msg)
+
+ def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x))
+ def tokenize(line: String, errorFn: String => Unit): List[String] = {
+ parse(commandLine, line.trim) match {
+ case Success(args, _) => args
+ case NoSuccess(msg, rest) => errorFn(msg) ; Nil
+ }
+ }
+ def apply(line: String) = new CommandLine(tokenize(line))
+}
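CommandLineParser replaces several ad-hoc tokenizers with one combinator-based one; quoted arguments keep their embedded spaces and the surrounding quotes are stripped. Hypothetical usage of the parser together with CommandLine:

    import scala.tools.nsc.util.{ CommandLine, CommandLineParser }
    CommandLineParser tokenize """--out target "a b.scala" rest"""
    // List(--out, target, a b.scala, rest)

    val line = new CommandLine("--verbose --out target x.scala") withUnary List("--verbose")
    line.argMap          // Map(--verbose -> true, --out -> target)
    line.residualArgs    // List(x.scala)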
diff --git a/src/compiler/scala/tools/nsc/util/CommandLineSpec.scala b/src/compiler/scala/tools/nsc/util/CommandLineSpec.scala
new file mode 100644
index 0000000000..826255e86f
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/CommandLineSpec.scala
@@ -0,0 +1,150 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package util
+
+import Properties._
+import io._
+import CommandLineSpec._
+import CommandLineParser.tokenize
+
+/** This trait works together with CommandLine to allow declaratively
+ * specifying a command line program, with many attendant benefits.
+ * See scala.tools.partest.PartestSpec for a full example.
+ */
+
+trait CommandLineSpec {
+ def parsed: CommandLine
+ def isReferenceSpec: Boolean = false
+ def isPassthroughProperty(name: String): Boolean = false
+ def isSysPropOption(key: String): Option[String] = None
+
+ protected var _expandingOptions: Map[String, List[String]] = Map()
+
+ private var _helpMessage: String = ""
+ private var _unaryOptions: List[String] = Nil
+ private var _binaryOptions: List[String] = Nil
+ private def allOptions = if (isReferenceSpec) Nil else parsed.allOptions
+ private def longestArg = if (allOptions.isEmpty) 1 else allOptions map (_.length) max
+ private def unquoted(s: String) = {
+ def isQuoted = (s.head == '\'' || s.head == '"') && s.head == s.last
+
+ if (s == null || s.length < 2 || !isQuoted) s
+ else s drop 1 dropRight 1
+ }
+
+ protected def help(str: String) = if (isReferenceSpec) () else _helpMessage += (str.stripMargin + "\n")
+ protected def heading(s: String) = if (isReferenceSpec) () else help("\n " + s)
+
+ /** The various operators:
+ * val isCond1 = "cond1" ? // --cond1 is unary, cond1 is boolean
+ * "cond2" ?> body // --cond2 is unary, body is executed if it is given
+ * "cond3" ?+> List(x1, x2...) // --cond3 is unary, arguments on rhs will be substituted in as if given
+ * val val1 = "val1" |> "alt" // --val1 is binary, val1 is String, alt used if none given
+ * val val2 = "val2" >> // --val2 is binary, val2 is Option[String], None if none given
+ */
+ protected class OptionStringAdditions(name: String) {
+ val s = toOpt(name)
+ def ?+>(args: List[String]): Unit = { _unaryOptions +:= s ; if (isReferenceSpec) _expandingOptions += (name -> args) }
+
+ def ? : Boolean = { _unaryOptions +:= s ; if (isReferenceSpec) false else parsed isSet s }
+ def ?>(body: => Unit): Unit = { _unaryOptions +:= s ; if (isReferenceSpec) () else if (parsed isSet s) body }
+ def |>(alt: String): String = { _binaryOptions +:= s ; if (isReferenceSpec) "" else parsed.getOrElse(s, alt) }
+ def >> : Option[String] = { _binaryOptions +:= s ; if (isReferenceSpec) None else parsed get s }
+
+ def /(description: String) = {
+ val formatStr = " %-" + longestArg + "s %s"
+ help(formatStr.format(s, description))
+
+ name
+ }
+ }
+ protected implicit def stringAdditions(s: String) = new OptionStringAdditions(s)
+
+ lazy val unaryOptions = _unaryOptions.distinct
+ lazy val binaryOptions = _binaryOptions.distinct
+ lazy val expandingOptions = _expandingOptions.keys.toList
+ lazy val helpMsg = _helpMessage
+
+ def isUnaryOption(s: String) = unaryOptions contains toOpt(s)
+ def isBinaryOption(s: String) = binaryOptions contains toOpt(s)
+ def isExpandingOption(s: String) = expandingOptions contains toOpt(s)
+
+ private def sysPropToOptions(k: String, v: String): List[String] = {
+ if (isPassthroughProperty(k)) toArgs(v)
+ else isSysPropOption(k).toList flatMap { optName =>
+ val opt = toOpt(optName)
+
+ if (isUnaryOption(optName)) List(opt)
+ else if (isBinaryOption(optName)) List(opt, v)
+ else {
+ if (warnSuspiciousProperties) {
+ println("Warning, this looks like a command line option but I don't understand it.")
+ println("Ignoring: " + k + "=" + v)
+ }
+ Nil
+ }
+ }
+ }
+ def warnSuspiciousProperties: Boolean = true
+ def sysPropsAsOptions() = allSystemProperties.toList flatMap (sysPropToOptions _).tupled
+
+ def isSet(s: String) = parsed isSet toOpt(s)
+ def reconstruct: List[String] = {
+ val unary = unaryOptions filter (parsed isSet _)
+ val binary = binaryOptions collect { case x if parsed isSet x => List(x, parsed(x)) }
+ val resid = parsed.residualArgs
+
+ unary ++ binary.flatten ++ resid
+ }
+
+ def bashCompletion(programName: String) = {
+ val opts = unaryOptions ++ binaryOptions
+ bashCompletionTemplate.replaceAll("@@PROGRAM@@", programName).replaceAll("@@OPTIONS@@", opts mkString " ")
+ }
+}
+
+trait CommandLineReferenceSpec extends CommandLineSpec {
+ final override def isReferenceSpec: Boolean = true
+ final def apply(args: String*) = creator(args.toList flatMap expandArg)
+
+ protected lazy val expansionMap = _expandingOptions
+ protected def creator(args: List[String]) = new ThisCommandLine(args)
+ protected def expandArg(arg: String) = expansionMap.getOrElse(fromOpt(arg), List(arg))
+
+ class ThisCommandLine(args: List[String]) extends CommandLine(args, unaryOptions, binaryOptions) {
+ }
+}
+
+object CommandLineSpec {
+ def toOpt(s: String) = if (s startsWith "--") s else "--" + s
+ def fromOpt(s: String) = s stripPrefix "--"
+ def toArgs(line: String) = tokenize(line)
+ def fromArgs(args: List[String]) = args mkString " "
+
+ def allSystemProperties: Map[String, String] = {
+ import collection.JavaConversions._
+
+ System.getProperties.toMap
+ }
+
+ /** A very simple template for generating bash completion functions.
+ */
+ val bashCompletionTemplate = """
+ |_@@PROGRAM@@()
+ |{
+ | local cur opts base
+ | COMPREPLY=()
+ | cur="${COMP_WORDS[COMP_CWORD]}"
+ | opts="@@OPTIONS@@"
+ |
+ | COMPREPLY=($(compgen -W "${opts}" -- ${cur}))
+ | _filedir
+ | return 0
+ |}
+ |complete -F _@@PROGRAM@@ @@PROGRAM@@
+ """.stripMargin
+}
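CommandLineSpec lets a tool declare its options with the operator DSL documented in the comment above, getting help text, parsing and bash completion from one declaration. A minimal sketch of such a spec (hypothetical; a concrete parsed value is supplied by the mixing-in program):

    import scala.tools.nsc.util.CommandLineSpec
    trait DemoSpec extends CommandLineSpec {
      heading("Demo options:")
      val isVerbose = ("verbose" / "emit more output").?      // --verbose      : Boolean
      val outDir    = "out" / "output directory" |> "."       // --out <dir>    : String, default "."
      val maybeTag  = ("tag" / "optional build tag").>>       // --tag <value>  : Option[String]
    }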
diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala
index 3392ef0577..7ca2722a54 100755
--- a/src/compiler/scala/tools/nsc/util/DocStrings.scala
+++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala
@@ -31,7 +31,7 @@ object DocStrings {
/** Returns index of string `str` after `start` skipping longest
* sequence of space and tab characters, possibly also containing
- * a single `*' character.
+ * a single `*' character or the `/``**` sequence.
* @pre start == str.length || str(start) == `\n'
*/
def skipLineLead(str: String, start: Int): Int =
@@ -39,16 +39,19 @@ object DocStrings {
else {
val idx = skipWhitespace(str, start + 1)
if (idx < str.length && (str charAt idx) == '*') skipWhitespace(str, idx + 1)
+ else if (idx + 2 < str.length && (str charAt idx) == '/' && (str charAt (idx + 1)) == '*' && (str charAt (idx + 2)) == '*')
+ skipWhitespace(str, idx + 3)
else idx
}
- /** Skips to next occurrence of `\n' following index `start`.
+ /** Skips to next occurrence of `\n' or to the position after the `/``**` sequence following index `start`.
*/
def skipToEol(str: String, start: Int): Int =
- if (start < str.length && (str charAt start) != '\n') skipToEol(str, start + 1)
+ if (start + 2 < str.length && (str charAt start) == '/' && (str charAt (start + 1)) == '*' && (str charAt (start + 2)) == '*') start + 3
+ else if (start < str.length && (str charAt start) != '\n') skipToEol(str, start + 1)
else start
- /** Returns first index following `start` and starting a line (i.e. after skipLineLead)
+ /** Returns first index following `start` and starting a line (i.e. after skipLineLead) or starting the comment
* which satisfies predicate `p'.
*/
def findNext(str: String, start: Int)(p: Int => Boolean): Int = {
@@ -70,7 +73,7 @@ object DocStrings {
* pairs of start/end positions of all tagged sections in the string.
* Every section starts with a `@' and extends to the next `@', or
* to the end of the comment string, but excluding the final two
- * charcters which terminate the comment.
+ * characters which terminate the comment.
*/
def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] =
findAll(str, 0) (idx => str(idx) == '@' && p(idx)) match {
diff --git a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
index 731e04e4dc..ddefcc04ee 100644
--- a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
+++ b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
@@ -7,7 +7,7 @@
package scala.tools.nsc
package util
-import Chars.{LF, FF, CR, SU}
+import Chars._
class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int, startcol: int, */
decodeUni: Boolean, error: String => Unit) extends Iterator[Char] with Cloneable {
@@ -121,15 +121,4 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int,
def copy: JavaCharArrayReader =
new JavaCharArrayReader(buf, bp, /* nextcol, nextline, */ decodeUni, error)
-
- def digit2int(ch: Char, base: Int): Int = {
- if ('0' <= ch && ch <= '9' && ch < '0' + base)
- ch - '0'
- else if ('A' <= ch && ch < 'A' + base - 10)
- ch - 'A' + 10
- else if ('a' <= ch && ch < 'a' + base - 10)
- ch - 'a' + 10
- else
- -1
- }
}
diff --git a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala b/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
new file mode 100644
index 0000000000..5511326a6b
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
@@ -0,0 +1,169 @@
+/* NSC -- new Scala compiler
+ * Copyright 2006-2010 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+// $Id$
+
+package scala.tools.nsc
+package util
+
+import java.io.File
+import java.net.URL
+import java.util.StringTokenizer
+import scala.util.Sorting
+
+import scala.collection.mutable.{ ListBuffer, HashSet => MutHashSet }
+import scala.tools.nsc.io.AbstractFile
+
+import ch.epfl.lamp.compiler.msil.{ Type => MSILType, Assembly }
+import ClassPath.{ ClassPathContext, isTraitImplementation }
+
+/** Keeping the MSIL classpath code in its own file is important to make sure
+ * we don't accidentally introduce a dependency on msil.jar in the jvm.
+ */
+
+object MsilClassPath {
+ def collectTypes(assemFile: AbstractFile) = {
+ var res: Array[MSILType] = MSILType.EmptyTypes
+ val assem = Assembly.LoadFrom(assemFile.path)
+ if (assem != null) {
+ // DeclaringType == null: true for non-inner classes
+ res = assem.GetTypes() filter (_.DeclaringType == null)
+ Sorting.stableSort(res, (t1: MSILType, t2: MSILType) => (t1.FullName compareTo t2.FullName) < 0)
+ }
+ res
+ }
+
+ /** On the java side this logic is in PathResolver, but as I'm not really
+ * up to folding MSIL into that, I am encapsulating it here.
+ */
+ def fromSettings(settings: Settings): MsilClassPath = {
+ val context =
+ if (settings.inline.value) new MsilContext
+ else new MsilContext { override def isValidName(name: String) = !isTraitImplementation(name) }
+
+ import settings._
+ new MsilClassPath(assemextdirs.value, assemrefs.value, sourcepath.value, context)
+ }
+
+ class MsilContext extends ClassPathContext[MSILType] {
+ def toBinaryName(rep: MSILType) = rep.Name
+ def newClassPath(assemFile: AbstractFile) = new AssemblyClassPath(MsilClassPath collectTypes assemFile, "", this)
+ }
+
+ private def assembleEntries(ext: String, user: String, source: String, context: MsilContext): List[ClassPath[MSILType]] = {
+ import ClassPath._
+ val etr = new ListBuffer[ClassPath[MSILType]]
+ val names = new MutHashSet[String]
+
+ // 1. Assemblies from -Xassem-extdirs
+ for (dirName <- expandPath(ext, expandStar = false)) {
+ val dir = AbstractFile.getDirectory(dirName)
+ if (dir ne null) {
+ for (file <- dir) {
+ val name = file.name.toLowerCase
+ if (name.endsWith(".dll") || name.endsWith(".exe")) {
+ names += name
+ etr += context.newClassPath(file)
+ }
+ }
+ }
+ }
+
+ // 2. Assemblies from -Xassem-path
+ for (fileName <- expandPath(user, expandStar = false)) {
+ val file = AbstractFile.getFile(fileName)
+ if (file ne null) {
+ val name = file.name.toLowerCase
+ if (name.endsWith(".dll") || name.endsWith(".exe")) {
+ names += name
+ etr += context.newClassPath(file)
+ }
+ }
+ }
+
+ def check(n: String) {
+ if (!names.contains(n))
+ throw new AssertionError("Cannot find assembly "+ n +
+ ". Use -Xassem-extdirs or -Xassem-path to specify its location")
+ }
+ check("mscorlib.dll")
+ check("scalaruntime.dll")
+
+ // 3. Source path
+ for (dirName <- expandPath(source, expandStar = false)) {
+ val file = AbstractFile.getDirectory(dirName)
+ if (file ne null) etr += new SourcePath[MSILType](file, context)
+ }
+
+ etr.toList
+ }
+}
+import MsilClassPath._
+
+/**
+ * An assembly file (dll / exe) containing classes and namespaces
+ */
+class AssemblyClassPath(types: Array[MSILType], namespace: String, val context: MsilContext) extends ClassPath[MSILType] {
+ def name = {
+ val i = namespace.lastIndexOf('.')
+ if (i < 0) namespace
+ else namespace drop (i + 1)
+ }
+ def asURLs = List(new java.net.URL(name))
+ def asClasspathString = error("Unknown") // I don't know what if anything makes sense here?
+
+ private lazy val first: Int = {
+ var m = 0
+ var n = types.length - 1
+ while (m < n) {
+ val l = (m + n) / 2
+ val res = types(l).FullName.compareTo(namespace)
+ if (res < 0) m = l + 1
+ else n = l
+ }
+ if (types(m).FullName.startsWith(namespace)) m else types.length
+ }
+
+ lazy val classes = {
+ val cls = new ListBuffer[ClassRep]
+ var i = first
+ while (i < types.length && types(i).Namespace.startsWith(namespace)) {
+ // CLRTypes used to exclude java.lang.Object and java.lang.String (no idea why..)
+ if (types(i).Namespace == namespace)
+ cls += ClassRep(Some(types(i)), None)
+ i += 1
+ }
+ cls.toList
+ }
+
+ lazy val packages = {
+ val nsSet = new MutHashSet[String]
+ var i = first
+ while (i < types.length && types(i).Namespace.startsWith(namespace)) {
+ val subns = types(i).Namespace
+ if (subns.length > namespace.length) {
+ // example: namespace = "System", subns = "System.Reflection.Emit"
+ // => find second "." and add "System.Reflection" to nsSet.
+ val end = subns.indexOf('.', namespace.length + 1)
+ nsSet += (if (end < 0) subns
+ else subns.substring(0, end))
+ }
+ i += 1
+ }
+ for (ns <- nsSet.toList)
+ yield new AssemblyClassPath(types, ns, context)
+ }
+
+ val sourcepaths: List[AbstractFile] = Nil
+
+ override def toString() = "assembly classpath "+ namespace
+}
+
+/**
+ * The classpath when compiling with target:msil. Binary files are represented as
+ * MSILType values.
+ */
+class MsilClassPath(ext: String, user: String, source: String, context: MsilContext)
+extends MergedClassPath[MSILType](MsilClassPath.assembleEntries(ext, user, source, context), context) { } \ No newline at end of file
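
AssemblyClassPath above locates the first type belonging to a namespace with a lower-bound binary search over the name-sorted type array, then checks that the element found actually carries the namespace prefix. A self-contained sketch of that search over plain strings (illustrative names, not MSILType):

    object LowerBoundSketch {
      // Binary search for the first element >= key in a sorted array; returns
      // its index if it starts with `key`, otherwise xs.length ("not present").
      def firstWithPrefix(xs: Array[String], key: String): Int = {
        var m = 0
        var n = xs.length - 1
        while (m < n) {
          val l = (m + n) / 2
          if (xs(l).compareTo(key) < 0) m = l + 1
          else n = l
        }
        if (xs.nonEmpty && xs(m).startsWith(key)) m else xs.length
      }

      def main(args: Array[String]): Unit = {
        val names = Array("System", "System.IO.File", "System.Reflection.Emit", "Zoo")
        println(firstWithPrefix(names, "System.Reflection")) // 2
        println(firstWithPrefix(names, "Missing"))           // 4 (not found)
      }
    }
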
diff --git a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
index 95110d6b81..5b1471c90d 100644
--- a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
+++ b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
@@ -12,8 +12,10 @@ import java.net.URL
import ScalaClassLoader._
import scala.util.control.Exception.{ catching }
-trait ScalaClassLoader extends JavaClassLoader
-{
+trait ScalaClassLoader extends JavaClassLoader {
+ /** Override to see classloader activity traced */
+ protected def trace: Boolean = false
+
/** Executing an action with this classloader as context classloader */
def asContext[T](action: => T): T = {
val oldLoader = getContextLoader
@@ -30,6 +32,9 @@ trait ScalaClassLoader extends JavaClassLoader
/** Load, link and initialize a class with this classloader */
def tryToInitializeClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, true)
+ private def tryBody[T <: AnyRef](body: => Any): Option[T] =
+ catching(classOf[ClassNotFoundException], classOf[SecurityException]) opt body.asInstanceOf[T]
+
private def tryClass[T <: AnyRef](path: String, initialize: Boolean): Option[Class[T]] =
catching(classOf[ClassNotFoundException], classOf[SecurityException]) opt
Class.forName(path, initialize, this).asInstanceOf[Class[T]]
@@ -42,6 +47,27 @@ trait ScalaClassLoader extends JavaClassLoader
}
}
+ override def findClass(name: String) = {
+ val result = super.findClass(name)
+ if (trace) println("findClass(%s) = %s".format(name, result))
+ result
+ }
+
+ override def loadClass(name: String, resolve: Boolean) = {
+ val result = super.loadClass(name, resolve)
+ if (trace) println("loadClass(%s, %s) = %s".format(name, resolve, result))
+ result
+ }
+
+ /** The actual bytes for a class file, or an empty array if it can't be found. */
+ def findBytesForClassName(s: String): Array[Byte] = {
+ val name = s.replaceAll("""\.""", "/") + ".class"
+ val url = this.getResource(name)
+
+ if (url == null) Array()
+ else new io.Streamable.Bytes { def inputStream() = url.openStream } . toByteArray()
+ }
+
/** Run the main method of a class to be loaded by this classloader */
def run(objectName: String, arguments: Seq[String]) {
val clsToRun = tryToInitializeClass(objectName) getOrElse (
@@ -56,13 +82,20 @@ trait ScalaClassLoader extends JavaClassLoader
}
}
-
object ScalaClassLoader {
class URLClassLoader(urls: Seq[URL], parent: JavaClassLoader)
extends java.net.URLClassLoader(urls.toArray, parent)
with ScalaClassLoader {
+
+ private var classloaderURLs = urls.toList
+
/** Override to widen to public */
- override def addURL(url: URL) = super.addURL(url)
+ override def addURL(url: URL) = {
+ classloaderURLs +:= url
+ super.addURL(url)
+ }
+
+ override def toString = urls.mkString("URLClassLoader(\n ", "\n ", "\n)\n")
}
def setContextLoader(cl: JavaClassLoader) = Thread.currentThread.setContextClassLoader(cl)
@@ -70,8 +103,8 @@ object ScalaClassLoader {
def getSystemLoader(): ScalaClassLoader = new JavaClassLoader(JavaClassLoader.getSystemClassLoader()) with ScalaClassLoader
def defaultParentClassLoader() = findExtClassLoader()
- def fromURLs(urls: Seq[URL]): URLClassLoader =
- new URLClassLoader(urls.toList, defaultParentClassLoader())
+ def fromURLs(urls: Seq[URL], parent: ClassLoader = defaultParentClassLoader()): URLClassLoader =
+ new URLClassLoader(urls.toList, parent)
/** True if supplied class exists in supplied path */
def classExists(urls: Seq[URL], name: String): Boolean =
@@ -91,11 +124,8 @@ object ScalaClassLoader {
search(getContextLoader())
}
- def findBytesForClassName(s: String): Array[Byte] = {
- val name = s.replaceAll("""\.""", "/") + ".class"
- val url = getSystemLoader.getResource(name)
-
- if (url == null) Array()
- else new io.Streamable.Bytes { def inputStream() = url.openStream } . toByteArray()
- }
+ /** Finding what jar a clazz or instance came from */
+ def origin(x: Any): Option[URL] = originOfClass(x.asInstanceOf[AnyRef].getClass)
+ def originOfClass(x: Class[_]): Option[URL] =
+ Option(x.getProtectionDomain.getCodeSource) flatMap (x => Option(x.getLocation))
}
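
The origin helpers added above answer "which jar did this class come from?" by asking the class's protection domain for its code source. A hedged, standalone usage sketch of the same JDK calls (not tied to ScalaClassLoader):

    object OriginSketch {
      import java.net.URL

      // Location a class was loaded from, when its code source exposes one.
      def originOfClass(x: Class[_]): Option[URL] =
        Option(x.getProtectionDomain.getCodeSource) flatMap (cs => Option(cs.getLocation))

      def main(args: Array[String]): Unit = {
        println(originOfClass(classOf[String]))   // usually None for JDK core classes
        println(originOfClass(this.getClass))     // Some(file:...) when run from a jar or class dir
      }
    }
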
diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
index 1f181d36ed..197eb28661 100644
--- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala
+++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
@@ -12,13 +12,47 @@ import java.lang.Long.toHexString
import java.lang.Float.intBitsToFloat
import java.lang.Double.longBitsToDouble
-import symtab.{Flags, Names}
-import symtab.classfile.{PickleBuffer, PickleFormat}
+import symtab.{ Flags, Names }
+import scala.reflect.generic.{ PickleBuffer, PickleFormat }
+import interpreter.ByteCode.scalaSigBytesForPath
object ShowPickled extends Names {
-
import PickleFormat._
+ case class PickleBufferEntry(num: Int, startIndex: Int, tag: Int, bytes: Array[Byte]) {
+ def isName = tag == TERMname || tag == TYPEname
+ def hasName = tag match {
+ case TYPEsym | ALIASsym | CLASSsym | MODULEsym | VALsym | EXTref | EXTMODCLASSref => true
+ case _ => false
+ }
+ def readName =
+ if (isName) new String(bytes, "UTF-8")
+ else error("%s is no name" format tagName)
+ def nameIndex =
+ if (hasName) readNat(bytes, 0)
+ else error("%s has no name" format tagName)
+
+ def tagName = tag2string(tag)
+ override def toString = "%d,%d: %s".format(num, startIndex, tagName)
+ }
+
+ case class PickleBufferEntryList(entries: IndexedSeq[PickleBufferEntry]) {
+ def nameAt(idx: Int) = {
+ val entry = entries(idx)
+ if (entry.isName) entry.readName
+ else if (entry.hasName) entries(entry.nameIndex).readName
+ else "?"
+ }
+ }
+
+ def makeEntryList(buf: PickleBuffer, index: Array[Int]) = {
+ val entries = buf.toIndexedSeq.zipWithIndex map {
+ case ((tag, data), num) => PickleBufferEntry(num, index(num), tag, data)
+ }
+
+ PickleBufferEntryList(entries)
+ }
+
def tag2string(tag: Int): String = tag match {
case TERMname => "TERMname"
case TYPEname => "TYPEname"
@@ -69,21 +103,43 @@ object ShowPickled extends Names {
case _ => "***BAD TAG***(" + tag + ")"
}
- def printFile(buf: PickleBuffer, out: PrintStream) {
+ /** Extremely regrettably, essentially copied from PickleBuffer.
+ */
+ def readNat(data: Array[Byte], index: Int): Int = {
+ var idx = index
+ var result = 0L
+ var b = 0L
+ do {
+ b = data(idx)
+ idx += 1
+ result = (result << 7) + (b & 0x7f)
+ } while((b & 0x80) != 0L)
+
+ result.toInt
+ }
+
+ def printFile(buf: PickleBuffer, out: PrintStream): Unit = printFile(buf, out, false)
+ def printFile(buf: PickleBuffer, out: PrintStream, bare: Boolean) {
out.println("Version " + buf.readNat() + "." + buf.readNat())
val index = buf.createIndex
+ val entryList = makeEntryList(buf, index)
+ buf.readIndex = 0
+
+ /** A print wrapper which discards everything if bare is true.
+ */
+ def p(s: String) = if (!bare) out print s
def printNameRef() {
- val x = buf.readNat()
- val savedIndex = buf.readIndex
- buf.readIndex = index(x)
- val tag = buf.readByte()
- val len = buf.readNat()
- out.print(" " + x + "(" + newTermName(buf.bytes, buf.readIndex, len) + ")")
- buf.readIndex = savedIndex
+ val idx = buf.readNat()
+ val name = entryList nameAt idx
+ val toPrint = if (bare) " " + name else " %s(%s)".format(idx, name)
+
+ out print toPrint
}
- def printNat() = out.print(" " + buf.readNat())
+ def printNat() = p(" " + buf.readNat())
+ def printReadNat(x: Int) = p(" " + x)
+
def printSymbolRef() = printNat()
def printTypeRef() = printNat()
def printConstantRef() = printNat()
@@ -91,13 +147,38 @@ object ShowPickled extends Names {
def printConstAnnotArgRef() = printNat()
def printAnnotArgRef() = printNat()
- def printSymInfo() {
+ def printSymInfo(end: Int) {
printNameRef()
printSymbolRef()
val pflags = buf.readLongNat()
- out.print(" " + toHexString(pflags) +
- "[" + Flags.flagsToString(Flags.pickledToRawFlags(pflags)) + "] ")
- printTypeRef()
+ def printFlags(privateWithin: Option[Int]) = {
+ val accessBoundary = (
+ for (idx <- privateWithin) yield {
+ val s = entryList nameAt idx
+ if (bare) s else idx + "(" + s + ")"
+ }
+ )
+ val flagString = {
+ val arg1 = Flags.pickledToRawFlags(pflags)
+ accessBoundary match {
+ case Some(pw) => Flags.flagsToString(arg1, pw)
+ case _ => Flags.flagsToString(arg1)
+ }
+ }
+
+ out.print(" %s[%s]".format(toHexString(pflags), flagString))
+ }
+
+ /** Might be info or privateWithin */
+ val x = buf.readNat()
+ if (buf.readIndex == end) {
+ printFlags(None)
+ printReadNat(x)
+ }
+ else {
+ printFlags(Some(x))
+ printTypeRef()
+ }
}
/** Note: the entries which require some semantic analysis to be correctly
@@ -106,12 +187,12 @@ object ShowPickled extends Names {
*/
def printEntry(i: Int) {
buf.readIndex = index(i)
- out.print(i + "," + buf.readIndex + ": ")
+ p(i + "," + buf.readIndex + ": ")
val tag = buf.readByte()
out.print(tag2string(tag))
val len = buf.readNat()
val end = len + buf.readIndex
- out.print(" " + len + ":")
+ p(" " + len + ":")
tag match {
case TERMname =>
out.print(" ")
@@ -122,7 +203,7 @@ object ShowPickled extends Names {
out.print(newTypeName(buf.bytes, buf.readIndex, len))
buf.readIndex = end
case TYPEsym | ALIASsym | CLASSsym | MODULEsym | VALsym =>
- printSymInfo()
+ printSymInfo(end)
if (tag == CLASSsym && (buf.readIndex < end)) printTypeRef()
case EXTref | EXTMODCLASSref =>
printNameRef()
@@ -141,7 +222,7 @@ object ShowPickled extends Names {
printSymbolRef(); buf.until(end, printTypeRef)
case CLASSINFOtpe =>
printSymbolRef(); buf.until(end, printTypeRef)
- case METHODtpe =>
+ case METHODtpe | IMPLICITMETHODtpe =>
printTypeRef(); buf.until(end, printTypeRef)
case POLYtpe =>
printTypeRef(); buf.until(end, printSymbolRef)
@@ -179,6 +260,8 @@ object ShowPickled extends Names {
printTypeRef(); buf.until(end, printAnnotArgRef)
case ANNOTARGARRAY =>
buf.until(end, printConstAnnotArgRef)
+ case EXISTENTIALtpe =>
+ printTypeRef(); buf.until(end, printSymbolRef)
case _ =>
}
@@ -193,17 +276,28 @@ object ShowPickled extends Names {
for (i <- 0 until index.length) printEntry(i)
}
+ def fromFile(path: String) = fromBytes(io.File(path).toByteArray)
+ def fromName(name: String) = fromBytes(scalaSigBytesForPath(name) getOrElse Array())
+ def fromBytes(data: => Array[Byte]): Option[PickleBuffer] =
+ try Some(new PickleBuffer(data, 0, data.length))
+ catch { case _: Exception => None }
+
+ def show(what: String, pickle: PickleBuffer, bare: Boolean) = {
+ Console.println(what + ": ")
+ printFile(pickle, Console.out, bare)
+ }
+
+ /** Option --bare suppresses numbers so the output can be diffed.
+ */
def main(args: Array[String]) {
- val file = new File(args(0))
- try {
- val stream = new FileInputStream(file)
- val data = new Array[Byte](stream.available())
- stream.read(data)
- val pickle = new PickleBuffer(data, 0, data.length)
- printFile(pickle, Console.out)
- } catch {
- case ex: IOException =>
- Console.println("cannot read " + file + ": " + ex.getMessage())
+ val parsed = CommandLine(args.toList, List("--bare"), Nil)
+ def isBare = parsed isSet "--bare"
+
+ parsed.residualArgs foreach { arg =>
+ (fromFile(arg) orElse fromName(arg)) match {
+ case Some(pb) => show(arg, pb, isBare)
+ case _ => Console.println("Cannot read " + arg)
+ }
}
}
}
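
The readNat copied into ShowPickled decodes the pickle format's variable-length naturals: seven payload bits per byte, high bit set on every byte but the last, most significant group first. A small round-trip sketch (writeNat is my own encoder, added for illustration; readNat mirrors the method above):

    object NatCodecSketch {
      // Encode a non-negative Int into 7-bit groups, most significant first,
      // setting the high bit on every byte except the last.
      def writeNat(x: Int): Array[Byte] = {
        def groups(n: Int): List[Int] =
          if (n < 0x80) List(n) else groups(n >>> 7) :+ (n & 0x7f)
        val gs = groups(x)
        (gs.init.map(b => (b | 0x80).toByte) :+ gs.last.toByte).toArray
      }

      // Decode starting at `index`, exactly as ShowPickled.readNat does above.
      def readNat(data: Array[Byte], index: Int): Int = {
        var idx = index
        var result = 0L
        var b = 0L
        do {
          b = data(idx)
          idx += 1
          result = (result << 7) + (b & 0x7f)
        } while ((b & 0x80) != 0L)
        result.toInt
      }

      def main(args: Array[String]): Unit = {
        for (n <- List(0, 127, 128, 16384, 1234567))
          assert(readNat(writeNat(n), 0) == n)
        println("nat round-trip ok")
      }
    }
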
diff --git a/src/compiler/scala/tools/nsc/util/SourceFile.scala b/src/compiler/scala/tools/nsc/util/SourceFile.scala
index e8b6a1c63c..57d2cc782f 100644
--- a/src/compiler/scala/tools/nsc/util/SourceFile.scala
+++ b/src/compiler/scala/tools/nsc/util/SourceFile.scala
@@ -55,10 +55,10 @@ class BatchSourceFile(val file : AbstractFile, val content: Array[Char]) extends
def this(file: AbstractFile, cs: Seq[Char]) = this(file, cs.toArray)
override def equals(that : Any) = that match {
- case that : BatchSourceFile => file == that.file
+ case that : BatchSourceFile => file.path == that.file.path
case _ => false
}
- override def hashCode = file.hashCode
+ override def hashCode = file.path.hashCode
val length = content.length
// in SourceFileFragments, these are overridden to compensate during offset calculation
@@ -154,17 +154,11 @@ extends BatchSourceFile(name, contents)
/** The usual constructor. Specify a name for the compound file and
* a list of component sources.
*/
- def this(name: String, components: BatchSourceFile*) = {
- this(
- name,
- components.toList,
- Array.concat(components.map(comp =>
- CompoundSourceFile.stripSU(comp.content).toArray):_*))
- }
+ def this(name: String, components: BatchSourceFile*) =
+ this(name, components.toList, components flatMap (CompoundSourceFile stripSU _.content) toArray)
/** Create an instance with the specified components and a generic name. */
- def this(components: BatchSourceFile*) =
- this("(virtual file)", components.toList:_*)
+ def this(components: BatchSourceFile*) = this("(virtual file)", components: _*)
override def positionInUltimateSource(position: Position) = {
if (!position.isDefined) super.positionInUltimateSource(position)
@@ -193,7 +187,7 @@ extends BatchSourceFile(name, contents)
object CompoundSourceFile {
private[util] def stripSU(chars: Array[Char]) =
if (chars.length > 0 && chars.last == SU)
- chars.slice(0, chars.length-1)
+ chars dropRight 1
else
chars
}
diff --git a/src/compiler/scala/tools/nsc/util/Statistics.scala b/src/compiler/scala/tools/nsc/util/Statistics.scala
index 188f2fcdb2..5aee76d946 100644
--- a/src/compiler/scala/tools/nsc/util/Statistics.scala
+++ b/src/compiler/scala/tools/nsc/util/Statistics.scala
@@ -168,6 +168,7 @@ object Statistics {
val ctr1 = new Counter
val ctr2 = new Counter
val ctr3 = new Counter
+ val ctr4 = new Counter
val counter1: SubCounter = new SubCounter(subtypeCount)
val counter2: SubCounter = new SubCounter(subtypeCount)
val timer1: Timer = new Timer
@@ -205,7 +206,7 @@ abstract class Statistics {
if (phase.name != "parser") {
val counts = new ClassCounts
for (u <- currentRun.units; t <- u.body) counts(t.getClass) += 1
- inform("#retained nodes : " + counts.valuesIterable.sum)
+ inform("#retained nodes : " + counts.values.sum)
inform("#retained nodes by type : " + showCounts(counts))
inform("#typechecked identifiers : " + typedIdentCount)
inform("#typechecked selections : " + typedSelectCount)
@@ -268,6 +269,7 @@ abstract class Statistics {
if (ctr1 != null) inform("#ctr1 : " + ctr1)
if (ctr2 != null) inform("#ctr2 : " + ctr2)
if (ctr3 != null) inform("#ctr3 : " + ctr3)
+ if (ctr4 != null) inform("#ctr4 : " + ctr4)
if (counter1 != null) inform("#counter1 : " + counter1)
if (counter2 != null) inform("#counter2 : " + counter2)
if (timer1 != null) inform("#timer1 : " + timer1)
diff --git a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
index b4ecbf8a71..e309b19b76 100644
--- a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
+++ b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
@@ -8,7 +8,7 @@ class WorkScheduler {
type Action = () => Unit
private var todo = new Queue[Action]
- private var except = new Queue[Exception]
+ private var throwables = new Queue[Throwable]
/** Called from server: block until todo list is nonempty */
def waitForMoreWork() = synchronized {
@@ -30,12 +30,12 @@ class WorkScheduler {
/** Called from server: return optional exception posted by client
* Reset to no exception.
*/
- def pollException(): Option[Exception] = synchronized {
- if (except.isEmpty)
+ def pollThrowable(): Option[Throwable] = synchronized {
+ if (throwables.isEmpty)
None
else {
- val result = Some(except.dequeue())
- if (!except.isEmpty)
+ val result = Some(throwables.dequeue())
+ if (!throwables.isEmpty)
postWorkItem { () => }
result
}
@@ -55,8 +55,8 @@ class WorkScheduler {
/** Called from client:
* Require an exception to be thrown on next poll.
*/
- def raise(exc: Exception) = synchronized {
- except enqueue exc
+ def raise(exc: Throwable) = synchronized {
+ throwables enqueue exc
postWorkItem { () => }
}
}
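
The WorkScheduler change widens the posted error type from Exception to Throwable; the raise/pollThrowable pair is a plain monitor-guarded handoff between the client and the compiler server thread. A compressed sketch of just that handoff (no work queue, simplified names):

    import scala.collection.mutable.Queue

    class ThrowableMailbox {
      private val throwables = new Queue[Throwable]

      // Client thread: post an error for the server to observe on its next poll.
      def raise(exc: Throwable): Unit = synchronized { throwables enqueue exc }

      // Server thread: take the oldest posted error, if any, and clear it.
      def pollThrowable(): Option[Throwable] = synchronized {
        if (throwables.isEmpty) None else Some(throwables.dequeue())
      }
    }

    object ThrowableMailboxDemo {
      def main(args: Array[String]): Unit = {
        val box = new ThrowableMailbox
        box.raise(new IllegalStateException("client-side failure"))
        println(box.pollThrowable()) // Some(java.lang.IllegalStateException: client-side failure)
        println(box.pollThrowable()) // None
      }
    }
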
diff --git a/src/compiler/scala/tools/util/BashCompletion.scala b/src/compiler/scala/tools/util/BashCompletion.scala
new file mode 100644
index 0000000000..ed9f1b505a
--- /dev/null
+++ b/src/compiler/scala/tools/util/BashCompletion.scala
@@ -0,0 +1,132 @@
+/* NSC -- new Scala compiler
+ * Copyright 2006-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package util
+
+import nsc.{ Global, Settings }
+
+/** Examines Settings and generates a bash completion file
+ * containing both bells and whistles.
+ */
+object BashCompletion {
+ val completionTemplate = """
+# Bash Scala completion
+#
+# Add this file to /etc/bash_completion.d/ (or your local equivalent)
+# or place a line like this in your .bashrc or .profile:
+#
+# . /path/to/file/scala_completion.sh
+#
+# For more information, see:
+#
+# http://bash-completion.alioth.debian.org/
+#
+# This file is generated by running scala.tools.util.BashCompletion.
+#
+
+SCALA_PHASES="@@PHASES@@"
+SCALA_PHASE_SETTINGS=( @@PHASE_SETTINGS@@ )
+SCALA_OPTIONS="@@OPTIONS@@"
+SCALA_OPTIONS_EXPANDED="@@OPTIONS_EXPANDED@@"
+
+_scala_completion()
+{
+ local cur prev opts colonprefixes
+
+ COMPREPLY=()
+ opts=""
+ cur="${COMP_WORDS[COMP_CWORD]}"
+ prev="${COMP_WORDS[COMP_CWORD-1]}"
+ colonprefixes=${cur%"${cur##*:}"}
+
+ # special case escaping madness because bash treats : as a separator.
+ case "${cur}" in
+ -*:*)
+ precolon=$(echo "${cur}" | sed 's/:.*//g')
+
+ for p in ${SCALA_PHASE_SETTINGS[@]}; do
+ if [[ "${precolon}" == "${p}" ]] ; then
+ cur=$(echo "${cur}" | sed 's/.*://g') # cut cur down to postcolon part
+ opts=${SCALA_PHASES}
+ fi
+ done
+
+ if [ "${opts}" == "" ] ; then
+ opts=${SCALA_OPTIONS_EXPANDED}
+ fi
+ ;;
+ esac
+
+ if [ "${opts}" == "" ] ; then
+ opts=${SCALA_OPTIONS}
+ fi
+
+ COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
+
+ local i=${#COMPREPLY[*]}
+ while [ $((--i)) -ge 0 ]; do
+ COMPREPLY[$i]=${COMPREPLY[$i]#"$colonprefixes"}
+ done
+
+ return 0
+}
+
+_scala_commands()
+{
+@@PROGRAMS@@
+}
+_scala_commands
+ """.trim
+
+ private lazy val settings = new Settings()
+ import settings._
+
+ val phaseNames = "all" :: (new Global(settings) phaseNames)
+ val phaseSettings = settings.visibleSettings collect { case x: PhasesSetting => "\"" + x.name + "\"" }
+
+ def settingStrings(s: Setting, expanded: Boolean) = s match {
+ case x: ChoiceSetting => if (expanded) x.choices map (x.name + ":" + _) else List(x.name + ":")
+ case x: PhasesSetting => List(x.name + ":")
+ case x => List(x.name)
+ }
+
+ /** We embed one list which stops at : and another where all choice settings are expanded out
+ * to include the choices.
+ */
+ def settingNames = settings.visibleSettings.toList flatMap (x => settingStrings(x, false)) sorted
+ def settingNamesExpanded = settings.visibleSettings.toList flatMap (x => settingStrings(x, true)) sorted
+
+ def commandForName(name: String) = " complete -o default -F _scala_completion " + name + "\n"
+ def interpolate(template: String, what: (String, String)*) =
+ what.foldLeft(template) {
+ case (text, (key, value)) =>
+ val token = "@@" + key + "@@"
+
+ (text indexOf token) match {
+ case -1 => error("Token '%s' does not exist." format token)
+ case idx => (text take idx) + value + (text drop idx drop token.length)
+ }
+ }
+
+ def create(cmds: List[String]) = {
+ interpolate(completionTemplate,
+ "PROGRAMS" -> (cmds map commandForName mkString ""),
+ "OPTIONS" -> (settingNames mkString " "),
+ "OPTIONS_EXPANDED" -> (settingNamesExpanded mkString " "),
+ "PHASES" -> (phaseNames mkString " "),
+ "PHASE_SETTINGS" -> (phaseSettings mkString " ")
+ )
+ }
+
+ def main(args: Array[String]): Unit = {
+ val commands = if (args.isEmpty) List("fsc", "scala", "scalac", "scaladoc") else args.toList
+ val result = create(commands)
+ if (result contains "@@")
+ error("Some tokens were not replaced: text is " + result)
+
+ println(result)
+ }
+} \ No newline at end of file
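
BashCompletion fills the shell template by replacing each `@@KEY@@` token and refuses to emit output if any token is left unreplaced. A standalone sketch of that interpolation step (same token convention, simplified error handling):

    object InterpolateSketch {
      // Replace the first occurrence of each @@KEY@@ token; fail if a token is missing.
      def interpolate(template: String, what: (String, String)*): String =
        what.foldLeft(template) { case (text, (key, value)) =>
          val token = "@@" + key + "@@"
          (text indexOf token) match {
            case -1  => sys.error("Token '%s' does not exist." format token)
            case idx => (text take idx) + value + (text drop idx drop token.length)
          }
        }

      def main(args: Array[String]): Unit = {
        val out = interpolate("SCALA_OPTIONS=\"@@OPTIONS@@\"", "OPTIONS" -> "-deprecation -unchecked")
        println(out)                 // SCALA_OPTIONS="-deprecation -unchecked"
        assert(!(out contains "@@")) // nothing left unreplaced
      }
    }
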
diff --git a/src/compiler/scala/tools/util/ClassPathSettings.scala b/src/compiler/scala/tools/util/ClassPathSettings.scala
new file mode 100644
index 0000000000..ec2e1c3c5a
--- /dev/null
+++ b/src/compiler/scala/tools/util/ClassPathSettings.scala
@@ -0,0 +1,32 @@
+/* NSC -- new Scala compiler
+ * Copyright 2006-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package util
+
+trait ClassPathSettings {
+ def javabootclasspath: String // -javabootclasspath
+ def javaextdirs: String // -javaextdirs
+ def bootclasspath: String // -bootclasspath
+ def extdirs: String // -extdirs
+ def classpath: String // -classpath
+ def sourcepath: String // -sourcepath
+}
+
+// val debugLogger = {
+// val f = File("/tmp/path-resolve-log.txt")
+// if (f.exists) f.truncate()
+// else f.createFile()
+//
+// val res = f.bufferedWriter()
+// res write ("Started debug log: %s\n".format(new java.util.Date))
+// res
+// }
+// def log(msg: Any) = {
+// Console println msg
+// debugLogger.write(msg.toString + "\n")
+// debugLogger flush
+// }
+
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
new file mode 100644
index 0000000000..716b0b43dc
--- /dev/null
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -0,0 +1,253 @@
+/* NSC -- new Scala compiler
+ * Copyright 2006-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package util
+
+import java.net.{ URL, MalformedURLException }
+import scala.util.Properties._
+import nsc.{ Settings, GenericRunnerSettings }
+import nsc.util.{ ClassPath, JavaClassPath, ScalaClassLoader }
+import nsc.io.{ File, Directory, Path }
+import ClassPath.{ JavaContext, DefaultJavaContext, join, split }
+import PartialFunction.condOpt
+
+// Loosely based on the draft specification at:
+// https://lampsvn.epfl.ch/trac/scala/wiki/Classpath
+
+object PathResolver {
+ def firstNonEmpty(xs: String*) = xs find (_ != "") getOrElse ""
+
+ private def fileOpt(f: Path): Option[String] = f ifFile (_.path)
+ private def dirOpt(d: Path): Option[String] = d ifDirectory (_.path)
+ private def expandToPath(p: Path) = join(ClassPath.expandPath(p.path, true): _*)
+ private def expandToContents(p: Path) = join(ClassPath.expandDir(p.path): _*)
+
+ /** Map all classpath elements to absolute paths and reconstruct the classpath.
+ */
+ def makeAbsolute(cp: String) = ClassPath.map(cp, x => Path(x).toAbsolute.path)
+
+ /** pretty print class path */
+ def ppcp(s: String) = split(s) match {
+ case Nil => ""
+ case Seq(x) => x
+ case xs => xs map ("\n" + _) mkString
+ }
+
+ /** Values found solely by inspecting environment or property variables.
+ */
+ object Environment {
+ private def searchForBootClasspath = {
+ import scala.collection.JavaConversions._
+ System.getProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse ""
+ }
+ private def searchForScalaHome = {
+ for (url <- ScalaClassLoader originOfClass classOf[ScalaObject] ; if url.getProtocol == "file") yield
+ File(url.getFile).parent.path
+ } getOrElse ""
+
+ /** Environment variables which java pays attention to so it
+ * seems we do as well.
+ */
+ def classPathEnv = envOrElse("CLASSPATH", "")
+ def sourcePathEnv = envOrElse("SOURCEPATH", "")
+
+ def javaBootClassPath = propOrElse("sun.boot.class.path", searchForBootClasspath)
+ def javaExtDirs = propOrEmpty("java.ext.dirs")
+ def scalaHome = propOrEmpty("scala.home")
+ def scalaExtDirs = propOrEmpty("scala.ext.dirs")
+
+ /** The java classpath and whether to use it. */
+ def javaUserClassPath = propOrElse("java.class.path", "")
+ def useJavaClassPath = propOrFalse("scala.usejavacp")
+
+ override def toString = """
+ |object Environment {
+ | scalaHome = %s (useJavaClassPath = %s)
+ | javaBootClassPath = <%d chars>
+ | javaExtDirs = %s
+ | javaUserClassPath = %s
+ | scalaExtDirs = %s
+ |}""".trim.stripMargin.format(
+ scalaHome, useJavaClassPath,
+ javaBootClassPath.length,
+ ppcp(javaExtDirs),
+ ppcp(javaUserClassPath),
+ ppcp(scalaExtDirs)
+ )
+ }
+
+ /** Default values based on those in Environment as interpreted according
+ * to the path resolution specification.
+ */
+ object Defaults {
+ /* Against my better judgment, giving in to martin here and allowing
+ * CLASSPATH as the default if no -cp is given. Only if there is no
+ * command line option or environment variable is "." used.
+ */
+ def scalaUserClassPath = firstNonEmpty(Environment.classPathEnv, ".")
+ def scalaSourcePath = Environment.sourcePathEnv
+
+ def javaBootClassPath = Environment.javaBootClassPath
+ def javaUserClassPath = Environment.javaUserClassPath
+ def javaExtDirs = Environment.javaExtDirs
+ def useJavaClassPath = Environment.useJavaClassPath
+
+ def scalaHome = Environment.scalaHome
+ def scalaHomeDir = Directory(scalaHome)
+ def scalaHomeExists = scalaHomeDir.isDirectory
+ def scalaLibDir = Directory(scalaHomeDir / "lib")
+ def scalaClassesDir = Directory(scalaHomeDir / "classes")
+
+ def scalaLibAsJar = File(scalaLibDir / "scala-library.jar")
+ def scalaLibAsDir = Directory(scalaClassesDir / "library")
+
+ def scalaLibDirFound: Option[Directory] =
+ if (scalaLibAsJar.isFile) Some(scalaLibDir)
+ else if (scalaLibAsDir.isDirectory) Some(scalaClassesDir)
+ else None
+
+ def scalaLibFound =
+ if (scalaLibAsJar.isFile) scalaLibAsJar.path
+ else if (scalaLibAsDir.isDirectory) scalaLibAsDir.path
+ else ""
+
+ def scalaBootClassPath = scalaLibDirFound match {
+ case Some(dir) if scalaHomeExists => join(ClassPath expandDir dir.path: _*)
+ case _ => ""
+ }
+
+ def scalaExtDirs = Environment.scalaExtDirs
+ def scalaPluginDirs = List("misc", "scala-devel", "plugins")
+ def scalaPluginPath = join(scalaPluginDirs map (scalaHomeDir / _ path): _*)
+
+ override def toString = """
+ |object Defaults {
+ | scalaHome = %s
+ | javaBootClassPath = %s
+ | scalaLibDirFound = %s
+ | scalaLibFound = %s
+ | scalaBootClassPath = %s
+ | scalaPluginPath = %s
+ |}""".trim.stripMargin.format(
+ scalaHome,
+ ppcp(javaBootClassPath),
+ scalaLibDirFound, scalaLibFound,
+ ppcp(scalaBootClassPath), ppcp(scalaPluginPath)
+ )
+ }
+
+ def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = {
+ val s = new Settings()
+ s.classpath.value = path
+ new PathResolver(s, context) result
+ }
+
+ /** With no arguments, show the interesting values in Environment and Defaults.
+ * If there are arguments, show those in Calculated as if those options had been
+ * given to a scala runner.
+ */
+ def main(args: Array[String]): Unit = {
+ if (args.isEmpty) {
+ println(Environment)
+ println(Defaults)
+ }
+ else {
+ val settings = new Settings()
+ val rest = settings.processArguments(args.toList, false)._2
+ val pr = new PathResolver(settings)
+ println(" COMMAND: 'scala %s'".format(args.mkString(" ")))
+ println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" ")))
+ pr.result.show
+ }
+ }
+}
+import PathResolver.{ Defaults, Environment, firstNonEmpty, ppcp }
+
+class PathResolver(settings: Settings, context: JavaContext) {
+ def this(settings: Settings) = this(settings, if (settings.inline.value) new JavaContext else DefaultJavaContext)
+
+ private def cmdLineOrElse(name: String, alt: String) = {
+ (commandLineFor(name) match {
+ case Some("") => None
+ case x => x
+ }) getOrElse alt
+ }
+
+ private def commandLineFor(s: String): Option[String] = condOpt(s) {
+ case "javabootclasspath" => settings.javabootclasspath.value
+ case "javaextdirs" => settings.javaextdirs.value
+ case "bootclasspath" => settings.bootclasspath.value
+ case "extdirs" => settings.extdirs.value
+ case "classpath" | "cp" => settings.classpath.value
+ case "sourcepath" => settings.sourcepath.value
+ }
+
+ /** Calculated values based on any given command line options, falling back on
+ * those in Defaults.
+ */
+ object Calculated {
+ def scalaHome = Defaults.scalaHome
+ def useJavaClassPath = settings.usejavacp.value || Defaults.useJavaClassPath
+ def javaBootClassPath = cmdLineOrElse("javabootclasspath", Defaults.javaBootClassPath)
+ def javaExtDirs = cmdLineOrElse("javaextdirs", Defaults.javaExtDirs)
+ def javaUserClassPath = if (useJavaClassPath) Defaults.javaUserClassPath else ""
+ def scalaBootClassPath = cmdLineOrElse("bootclasspath", Defaults.scalaBootClassPath)
+ def scalaExtDirs = cmdLineOrElse("extdirs", Defaults.scalaExtDirs)
+ def userClassPath = cmdLineOrElse("classpath", Defaults.scalaUserClassPath)
+ def sourcePath = cmdLineOrElse("sourcepath", Defaults.scalaSourcePath)
+
+ import context._
+
+ // Assemble the elements!
+ def basis = List(
+ classesInPath(javaBootClassPath), // 1. The Java bootstrap class path.
+ contentsOfDirsInPath(javaExtDirs), // 2. The Java extension class path.
+ classesInExpandedPath(javaUserClassPath), // 3. The Java application class path.
+ classesInPath(scalaBootClassPath), // 4. The Scala boot class path.
+ contentsOfDirsInPath(scalaExtDirs), // 5. The Scala extension class path.
+ classesInExpandedPath(userClassPath), // 6. The Scala application class path.
+ sourcesInPath(sourcePath) // 7. The Scala source path.
+ )
+
+ lazy val containers = basis.flatten.distinct
+
+ override def toString = """
+ |object Calculated {
+ | scalaHome = %s
+ | javaBootClassPath = %s
+ | javaExtDirs = %s
+ | javaUserClassPath = %s
+ | useJavaClassPath = %s
+ | scalaBootClassPath = %s
+ | scalaExtDirs = %s
+ | userClassPath = %s
+ | sourcePath = %s
+ |}""".trim.stripMargin.format(
+ scalaHome,
+ ppcp(javaBootClassPath), ppcp(javaExtDirs), ppcp(javaUserClassPath),
+ useJavaClassPath,
+ ppcp(scalaBootClassPath), ppcp(scalaExtDirs), ppcp(userClassPath),
+ ppcp(sourcePath)
+ )
+ }
+
+ def containers = Calculated.containers
+
+ lazy val result = {
+ val cp = new JavaClassPath(containers, context)
+ if (settings.Ylogcp.value) {
+ Console.println("Classpath built from " + settings.toConciseString)
+ Console.println("Defaults: " + PathResolver.Defaults)
+
+ val xs = (Calculated.basis drop 2).flatten.distinct
+ println("After java boot/extdirs classpath has %d entries:" format xs.size)
+ xs foreach (x => println(" " + x))
+ }
+ cp
+ }
+
+ def asURLs = result.asURLs
+}
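
PathResolver's central idiom is layered fallback: an explicitly set command-line value wins, otherwise the Defaults value derived from the environment applies, with firstNonEmpty resolving chains of candidates. A small sketch of those two fallbacks with hypothetical values (not the compiler's Settings object):

    object FallbackSketch {
      // First non-empty string among the candidates, or "" if all are empty.
      def firstNonEmpty(xs: String*): String = xs find (_ != "") getOrElse ""

      // A command-line value only wins if it was actually set (non-empty).
      def cmdLineOrElse(cmdLine: Option[String], default: String): String =
        (cmdLine match {
          case Some("") => None
          case x        => x
        }) getOrElse default

      def main(args: Array[String]): Unit = {
        println(firstNonEmpty("", "", "."))                  // "." (like scalaUserClassPath)
        println(cmdLineOrElse(Some(""), "lib"))              // "lib": default wins over an unset option
        println(cmdLineOrElse(Some("/opt/classes"), "lib"))  // "/opt/classes"
      }
    }
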
diff --git a/src/compiler/scala/tools/util/StringOps.scala b/src/compiler/scala/tools/util/StringOps.scala
index 01b2482147..6e5b3e54d3 100644
--- a/src/compiler/scala/tools/util/StringOps.scala
+++ b/src/compiler/scala/tools/util/StringOps.scala
@@ -8,16 +8,16 @@
// $Id$
-package scala.tools.util
+package scala.tools
+package util
-/** This objects provides methods to extract elements from
- * a string according to some defined character separator.
+/** This object provides utility methods to extract elements
+ * from Strings.
*
* @author Martin Odersky
* @version 1.0
*/
object StringOps {
-
def decompose(str: String, sep: Char): List[String] = {
def ws(start: Int): List[String] =
if (start == str.length) List()
@@ -31,4 +31,19 @@ object StringOps {
}
def words(str: String): List[String] = decompose(str, ' ')
+
+ def stripPrefixOpt(str: String, prefix: String): Option[String] =
+ if (str startsWith prefix) Some(str drop prefix.length)
+ else None
+
+ def stripSuffixOpt(str: String, suffix: String): Option[String] =
+ if (str endsWith suffix) Some(str dropRight suffix.length)
+ else None
+
+ def splitWhere(str: String, f: Char => Boolean, doDropIndex: Boolean = false): Option[(String, String)] =
+ splitAt(str, str indexWhere f, doDropIndex)
+
+ def splitAt(str: String, idx: Int, doDropIndex: Boolean = false): Option[(String, String)] =
+ if (idx == -1) None
+ else Some(str take idx, str drop (if (doDropIndex) idx + 1 else idx))
}
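
The new splitWhere/splitAt helpers return an optional (prefix, suffix) pair and can drop the character at the split point. A quick usage sketch (standalone copy of the two methods above, purely for illustration):

    object SplitSketch {
      def splitAt(str: String, idx: Int, doDropIndex: Boolean = false): Option[(String, String)] =
        if (idx == -1) None
        else Some((str take idx, str drop (if (doDropIndex) idx + 1 else idx)))

      def splitWhere(str: String, f: Char => Boolean, doDropIndex: Boolean = false): Option[(String, String)] =
        splitAt(str, str indexWhere f, doDropIndex)

      def main(args: Array[String]): Unit = {
        println(splitWhere("key=value", _ == '=', doDropIndex = true)) // Some((key,value))
        println(splitWhere("no-separator", _ == '='))                  // None
      }
    }
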
diff --git a/src/compiler/scala/tools/util/Which.scala b/src/compiler/scala/tools/util/Which.scala
new file mode 100644
index 0000000000..b331416f3d
--- /dev/null
+++ b/src/compiler/scala/tools/util/Which.scala
@@ -0,0 +1,39 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package util
+
+import scala.tools.nsc._
+
+/** A tool for identifying which classfile is being used
+ * under the given conditions.
+ */
+object Which
+{
+ def main(args: Array[String]): Unit = {
+ val settings = new Settings()
+ val names = settings.processArguments(args.toList, true)._2
+ val global = new Global(settings)
+ val cp = global.classPath
+
+ import cp._
+
+ for (name <- names) {
+ def fail = println("Could not find: %s".format(name))
+ (cp findClass name) match {
+ case Some(classRep) => classRep.binary match {
+ case Some(f) => println("%s is %s".format(name, f))
+ case _ => fail
+ }
+ case _ => fail
+ }
+ }
+ }
+}
+
+
+
+
diff --git a/src/continuations/library/scala/util/continuations/ControlContext.scala b/src/continuations/library/scala/util/continuations/ControlContext.scala
new file mode 100644
index 0000000000..87a9bf1fc5
--- /dev/null
+++ b/src/continuations/library/scala/util/continuations/ControlContext.scala
@@ -0,0 +1,161 @@
+// $Id$
+
+package scala.util.continuations
+
+
+class cpsParam[-B,+C] extends StaticAnnotation with TypeConstraint
+
+private class cpsSym[B] extends Annotation // implementation detail
+
+private class cpsSynth extends Annotation // implementation detail
+
+private class cpsPlus extends StaticAnnotation with TypeConstraint // implementation detail
+private class cpsMinus extends Annotation // implementation detail
+
+
+
+@serializable final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val x: A) {
+
+ /*
+ final def map[A1](f: A => A1): ControlContext[A1,B,C] = {
+ new ControlContext((k:(A1 => B)) => fun((x:A) => k(f(x))), null.asInstanceOf[A1])
+ }
+
+ final def flatMap[A1,B1<:B](f: (A => ControlContext[A1,B1,B])): ControlContext[A1,B1,C] = {
+ new ControlContext((k:(A1 => B1)) => fun((x:A) => f(x).fun(k)))
+ }
+ */
+
+
+ @noinline final def map[A1](f: A => A1): ControlContext[A1,B,C] = {
+ if (fun eq null)
+ try {
+ new ControlContext(null, f(x)) // TODO: only alloc if f(x) != x
+ } catch {
+ case ex: Exception =>
+ new ControlContext((k: A1 => B, thr: Exception => B) => thr(ex).asInstanceOf[C], null.asInstanceOf[A1])
+ }
+ else
+ new ControlContext({ (k: A1 => B, thr: Exception => B) =>
+ fun( { (x:A) =>
+ var done = false
+ try {
+ val res = f(x)
+ done = true
+ k(res)
+ } catch {
+ case ex: Exception if !done =>
+ thr(ex)
+ }
+ }, thr)
+ }, null.asInstanceOf[A1])
+ }
+
+
+ // it would be nice if @inline would turn the trivial path into a tail call.
+ // unfortunately it doesn't, so we do it ourselves in SelectiveCPSTransform
+
+ @noinline final def flatMap[A1,B1,C1<:B](f: (A => ControlContext[A1,B1,C1])): ControlContext[A1,B1,C] = {
+ if (fun eq null)
+ try {
+ f(x).asInstanceOf[ControlContext[A1,B1,C]]
+ } catch {
+ case ex: Exception =>
+ new ControlContext((k: A1 => B1, thr: Exception => B1) => thr(ex).asInstanceOf[C], null.asInstanceOf[A1])
+ }
+ else
+ new ControlContext({ (k: A1 => B1, thr: Exception => B1) =>
+ fun( { (x:A) =>
+ var done = false
+ try {
+ val ctxR = f(x)
+ done = true
+ val res: C1 = ctxR.foreachFull(k, thr) // => B1
+ res
+ } catch {
+ case ex: Exception if !done =>
+ thr(ex).asInstanceOf[B] // => B NOTE: in general this is unsafe!
+ } // However, the plugin will not generate offending code
+ }, thr.asInstanceOf[Exception=>B]) // => B
+ }, null.asInstanceOf[A1])
+ }
+
+ final def foreach(f: A => B) = foreachFull(f, throw _)
+
+ def foreachFull(f: A => B, g: Exception => B): C = {
+ if (fun eq null)
+ f(x).asInstanceOf[C]
+ else
+ fun(f, g)
+ }
+
+
+ final def isTrivial = fun eq null
+ final def getTrivialValue = x.asInstanceOf[A]
+
+ // need filter or other functions?
+
+ final def flatMapCatch[A1>:A,B1<:B,C1>:C<:B1](pf: PartialFunction[Exception, ControlContext[A1,B1,C1]]): ControlContext[A1,B1,C1] = {
+ if (fun eq null)
+ this
+ else {
+ val fun1 = (ret1: A1 => B1, thr1: Exception => B1) => {
+ val thr: Exception => B1 = { t: Exception =>
+ var captureExceptions = true
+ try {
+ if (pf.isDefinedAt(t)) {
+ val cc1 = pf(t)
+ captureExceptions = false
+ cc1.foreachFull(ret1, thr1) // Throw => B
+ } else {
+ captureExceptions = false
+ thr1(t) // Throw => B1
+ }
+ } catch {
+ case t1: Exception if captureExceptions => thr1(t1) // => E2
+ }
+ }
+ fun(ret1, thr)// fun(ret1, thr) // => B
+ }
+ new ControlContext(fun1, null.asInstanceOf[A1])
+ }
+ }
+
+ final def mapFinally(f: () => Unit): ControlContext[A,B,C] = {
+ if (fun eq null) {
+ try {
+ f()
+ this
+ } catch {
+ case ex: Exception =>
+ new ControlContext((k: A => B, thr: Exception => B) => thr(ex).asInstanceOf[C], null.asInstanceOf[A])
+ }
+ } else {
+ val fun1 = (ret1: A => B, thr1: Exception => B) => {
+ val ret: A => B = { x: A =>
+ var captureExceptions = true
+ try {
+ f()
+ captureExceptions = false
+ ret1(x)
+ } catch {
+ case t1: Exception if captureExceptions => thr1(t1)
+ }
+ }
+ val thr: Exception => B = { t: Exception =>
+ var captureExceptions = true
+ try {
+ f()
+ captureExceptions = false
+ thr1(t)
+ } catch {
+ case t1: Exception if captureExceptions => thr1(t1)
+ }
+ }
+ fun(ret, thr1)
+ }
+ new ControlContext(fun1, null.asInstanceOf[A])
+ }
+ }
+
+}
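
ControlContext runs in two modes: a trivial context (fun eq null) already holds its value, while a non-trivial one holds a function awaiting success and failure continuations; map and flatMap take a fast path in the trivial case. A stripped-down sketch of that two-mode design (success channel only, illustrative names; the real class also threads an exception continuation):

    // A reduced ControlContext: either a plain value (fun == null) or a
    // computation expecting a continuation. Exception handling is omitted.
    final class MiniContext[+A, B](val fun: (A => B) => B, val x: A) {
      def isTrivial: Boolean = fun eq null

      def map[A1](f: A => A1): MiniContext[A1, B] =
        if (isTrivial) new MiniContext[A1, B](null, f(x)) // fast path: transform the value directly
        else new MiniContext[A1, B]((k: A1 => B) => fun((a: A) => k(f(a))), null.asInstanceOf[A1])

      def run(k: A => B): B =
        if (isTrivial) k(x) else fun(k)
    }

    object MiniContextDemo {
      def main(args: Array[String]): Unit = {
        val trivial = new MiniContext[Int, String](null, 41)
        println(trivial.map(_ + 1).run(n => "got " + n))   // got 42

        val suspended = new MiniContext[Int, String](k => k(10), 0)
        println(suspended.map(_ * 2).run(n => "got " + n)) // got 20
      }
    }
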
diff --git a/src/continuations/library/scala/util/continuations/package.scala b/src/continuations/library/scala/util/continuations/package.scala
new file mode 100644
index 0000000000..aa4681a0cc
--- /dev/null
+++ b/src/continuations/library/scala/util/continuations/package.scala
@@ -0,0 +1,65 @@
+// $Id$
+
+
+// TODO: scaladoc
+
+package scala.util
+
+package object continuations {
+
+ type cps[A] = cpsParam[A,A]
+
+ type suspendable = cps[Unit]
+
+
+ def shift[A,B,C](fun: (A => B) => C): A @cpsParam[B,C] = {
+ throw new NoSuchMethodException("this code has to be compiled with the Scala continuations plugin enabled")
+ }
+
+ def reset[A,C](ctx: =>(A @cpsParam[A,C])): C = {
+ val ctxR = reify[A,A,C](ctx)
+ if (ctxR.isTrivial)
+ ctxR.getTrivialValue.asInstanceOf[C]
+ else
+ ctxR.foreach((x:A) => x)
+ }
+
+ def reset0[A](ctx: =>(A @cpsParam[A,A])): A = reset(ctx)
+
+ def run[A](ctx: =>(Any @cpsParam[Unit,A])): A = {
+ val ctxR = reify[Any,Unit,A](ctx)
+ if (ctxR.isTrivial)
+ ctxR.getTrivialValue.asInstanceOf[A]
+ else
+ ctxR.foreach((x:Any) => ())
+ }
+
+
+ // methods below are primarily implementation details and are not
+ // needed frequently in client code
+
+ def shiftUnit0[A,B](x: A): A @cpsParam[B,B] = {
+ shiftUnit[A,B,B](x)
+ }
+
+ def shiftUnit[A,B,C>:B](x: A): A @cpsParam[B,C] = {
+ throw new NoSuchMethodException("this code has to be compiled with the Scala continuations plugin enabled")
+ }
+
+ def reify[A,B,C](ctx: =>(A @cpsParam[B,C])): ControlContext[A,B,C] = {
+ throw new NoSuchMethodException("this code has to be compiled with the Scala continuations plugin enabled")
+ }
+
+ def shiftUnitR[A,B](x: A): ControlContext[A,B,B] = {
+ new ControlContext(null, x)
+ }
+
+ def shiftR[A,B,C](fun: (A => B) => C): ControlContext[A,B,C] = {
+ new ControlContext((f:A=>B,g:Exception=>B) => fun(f), null.asInstanceOf[A])
+ }
+
+ def reifyR[A,B,C](ctx: => ControlContext[A,B,C]): ControlContext[A,B,C] = {
+ ctx
+ }
+
+}
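
The shift/reset entry points above only do something useful once the selectivecps plugin has rewritten the call sites; without it they throw. What the rewrite produces can still be shown by hand: the continuation passed to shift's body is simply "the rest of the reset block". A plugin-free sketch of reset { 1 + shift { k => k(k(10)) } } written out in direct style:

    object HandCpsSketch {
      def main(args: Array[String]): Unit = {
        // The continuation k captured by shift is the remainder of the reset
        // block, here "1 + _"; shift's body chooses how often to invoke it.
        val k: Int => Int = x => 1 + x
        val result = k(k(10))
        println(result) // 12
      }
    }
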
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
new file mode 100644
index 0000000000..0c124c9c19
--- /dev/null
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -0,0 +1,462 @@
+// $Id$
+
+package scala.tools.selectivecps
+
+import scala.tools.nsc.Global
+
+import scala.collection.mutable.{Map, HashMap}
+
+import java.io.{StringWriter, PrintWriter}
+
+abstract class CPSAnnotationChecker extends CPSUtils {
+ val global: Global
+ import global._
+ import definitions._
+
+ //override val verbose = true
+
+ /**
+ * Checks whether @cps annotations conform
+ */
+ object checker extends AnnotationChecker {
+
+ /** Check annotations to decide whether tpe1 <:< tpe2 */
+ def annotationsConform(tpe1: Type, tpe2: Type): Boolean = {
+ if (!cpsEnabled) return true
+
+ vprintln("check annotations: " + tpe1 + " <:< " + tpe2)
+
+ // Nothing is least element, but Any is not the greatest
+ if (tpe1.typeSymbol eq NothingClass)
+ return true
+
+ val annots1 = filterAttribs(tpe1,MarkerCPSTypes)
+ val annots2 = filterAttribs(tpe2,MarkerCPSTypes)
+
+ // @plus and @minus should only occur at the left, and never together
+ // TODO: insert check
+ val adaptPlusAnnots1 = filterAttribs(tpe1,MarkerCPSAdaptPlus)
+ val adaptMinusAnnots1 = filterAttribs(tpe1,MarkerCPSAdaptMinus)
+
+ // @minus @cps is the same as no annotations
+ if (!adaptMinusAnnots1.isEmpty)
+ return annots2.isEmpty
+
+ // to handle answer type modification, we must make @plus <:< @cps
+ if (!adaptPlusAnnots1.isEmpty && annots1.isEmpty)
+ return true
+
+ // @plus @cps will fall through and compare the @cps type args
+
+ // @cps parameters must match exactly
+ if ((annots1 corresponds annots2) { _.atp <:< _.atp })
+ return true
+
+ false
+ }
+
+
+ /** Refine the computed least upper bound of a list of types.
+ * All this should do is add annotations. */
+ override def annotationsLub(tpe: Type, ts: List[Type]): Type = {
+ if (!cpsEnabled) return tpe
+
+ val annots1 = filterAttribs(tpe, MarkerCPSTypes)
+ val annots2 = ts flatMap (filterAttribs(_, MarkerCPSTypes))
+
+ if (annots2.nonEmpty) {
+ val cpsLub = AnnotationInfo(global.lub(annots1:::annots2 map (_.atp)), Nil, Nil)
+ val tpe1 = if (annots1.nonEmpty) removeAttribs(tpe, MarkerCPSTypes) else tpe
+ tpe1.withAnnotation(cpsLub)
+ } else tpe
+ }
+
+ /** Refine the bounds on type parameters to the given type arguments. */
+ override def adaptBoundsToAnnotations(bounds: List[TypeBounds], tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = {
+ if (!cpsEnabled) return bounds
+
+ val anyAtCPS = AnnotationInfo(appliedType(MarkerCPSTypes.tpe, List(NothingClass.tpe, AnyClass.tpe)), Nil, Nil)
+ if (isFunctionType(tparams.head.owner.tpe) || tparams.head.owner == PartialFunctionClass) {
+ vprintln("function bound: " + tparams.head.owner.tpe + "/"+bounds+"/"+targs)
+ if (targs.last.hasAnnotation(MarkerCPSTypes))
+ bounds.reverse match {
+ case res::b if !res.hi.hasAnnotation(MarkerCPSTypes) =>
+ (TypeBounds(res.lo, res.hi.withAnnotation(anyAtCPS))::b).reverse
+ case _ => bounds
+ }
+ else
+ bounds
+ } else if (tparams.head.owner == ByNameParamClass) {
+ vprintln("byname bound: " + tparams.head.owner.tpe + "/"+bounds+"/"+targs)
+ if (targs.head.hasAnnotation(MarkerCPSTypes) && !bounds.head.hi.hasAnnotation(MarkerCPSTypes))
+ TypeBounds(bounds.head.lo, bounds.head.hi.withAnnotation(anyAtCPS))::Nil
+ else bounds
+ } else
+ bounds
+ }
+
+
+ override def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = {
+ if (!cpsEnabled) return false
+ vprintln("can adapt annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
+
+ val annots1 = filterAttribs(tree.tpe,MarkerCPSTypes)
+ val annots2 = filterAttribs(pt,MarkerCPSTypes)
+
+ if ((mode & global.analyzer.PATTERNmode) != 0) {
+ //println("can adapt pattern annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
+ if (!annots1.isEmpty) {
+ return true
+ }
+ }
+
+/*
+ // not precise enough -- still relying on addAnnotations to remove things from ValDef symbols
+ if ((mode & global.analyzer.TYPEmode) != 0 && (mode & global.analyzer.BYVALmode) != 0) {
+ if (!annots1.isEmpty) {
+ return true
+ }
+ }
+*/
+
+/*
+ this interferes with overloading resolution
+ if ((mode & global.analyzer.BYVALmode) != 0 && tree.tpe <:< pt) {
+ vprintln("already compatible, can't adapt further")
+ return false
+ }
+*/
+ if ((mode & global.analyzer.EXPRmode) != 0) {
+ if ((annots1 corresponds annots2) { case (a1,a2) => a1.atp <:< a2.atp }) {
+ vprintln("already same, can't adapt further")
+ return false
+ }
+
+ if (annots1.isEmpty && !annots2.isEmpty && ((mode & global.analyzer.BYVALmode) == 0)) {
+ //println("can adapt annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
+ val adapt = AnnotationInfo(MarkerCPSAdaptPlus.tpe, Nil, Nil)
+ if (!tree.tpe.annotations.contains(adapt)) {
+ // val base = tree.tpe <:< removeAllCPSAnnotations(pt)
+ // val known = global.analyzer.isFullyDefined(pt)
+ // println(same + "/" + base + "/" + known)
+ //val same = annots2 forall { case AnnotationInfo(atp: TypeRef, _, _) => atp.typeArgs(0) =:= atp.typeArgs(1) }
+ // TBD: use same or not?
+ //if (same) {
+ vprintln("yes we can!! (unit)")
+ return true
+ //}
+ }
+ } else if (!annots1.isEmpty && ((mode & global.analyzer.BYVALmode) != 0)) {
+ if (!tree.tpe.hasAnnotation(MarkerCPSAdaptMinus)) {
+ vprintln("yes we can!! (byval)")
+ return true
+ }
+ }
+ }
+ false
+ }
+
+
+ override def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = {
+ if (!cpsEnabled) return tree
+
+ vprintln("adapt annotations " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
+
+ val annots1 = filterAttribs(tree.tpe,MarkerCPSTypes)
+ val annots2 = filterAttribs(pt,MarkerCPSTypes)
+
+ if ((mode & global.analyzer.PATTERNmode) != 0) {
+ if (!annots1.isEmpty) {
+ return tree.setType(removeAllCPSAnnotations(tree.tpe))
+ }
+ }
+
+/*
+ // doesn't work correctly -- still relying on addAnnotations to remove things from ValDef symbols
+ if ((mode & global.analyzer.TYPEmode) != 0 && (mode & global.analyzer.BYVALmode) != 0) {
+ if (!annots1.isEmpty) {
+ println("removing annotation from " + tree + "/" + tree.tpe)
+ val s = tree.setType(removeAllCPSAnnotations(tree.tpe))
+ println(s)
+ s
+ }
+ }
+*/
+
+ if ((mode & global.analyzer.EXPRmode) != 0) {
+ if (annots1.isEmpty && !annots2.isEmpty && ((mode & global.analyzer.BYVALmode) == 0)) { // shiftUnit
+ // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
+ // tree will look like having any possible annotation
+ //println("adapt annotations " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
+
+ val adapt = AnnotationInfo(MarkerCPSAdaptPlus.tpe, Nil, Nil)
+ //val same = annots2 forall { case AnnotationInfo(atp: TypeRef, _, _) => atp.typeArgs(0) =:= atp.typeArgs(1) }
+ // TBD: use same or not? see infer0.scala/infer1.scala
+
+ // CAVEAT:
+ // for monomorphic answer types we want to have @plus @cps (for better checking)
+ // for answer type modification we want to have only @plus (because actual answer type may differ from pt)
+
+ //val known = global.analyzer.isFullyDefined(pt)
+
+ if (/*same &&*/ !tree.tpe.annotations.contains(adapt)) {
+ //if (known)
+ return tree.setType(tree.tpe.withAnnotations(adapt::annots2)) // needed for #1807
+ //else
+ // return tree.setType(tree.tpe.withAnnotations(adapt::Nil))
+ }
+ tree
+ } else if (!annots1.isEmpty && ((mode & global.analyzer.BYVALmode) != 0)) { // dropping annotation
+ // add a marker annotation that will make tree.tpe behave as pt, subtyping wise
+ // tree will look like having no annotation
+ if (!tree.tpe.hasAnnotation(MarkerCPSAdaptMinus)) {
+ val adapt = AnnotationInfo(MarkerCPSAdaptMinus.tpe, Nil, Nil)
+ return tree.setType(tree.tpe.withAnnotations(adapt::Nil))
+ }
+ }
+ }
+ tree
+ }
+
+
+ def updateAttributesFromChildren(tpe: Type, childAnnots: List[AnnotationInfo], byName: List[Tree]): Type = {
+ tpe match {
+ // Would need to push annots into each alternative of overloaded type
+ // But we can't, since alternatives aren't types but symbols, which we
+ // can't change (we'd be affecting symbols globally)
+ /*
+ case OverloadedType(pre, alts) =>
+ OverloadedType(pre, alts.map((sym: Symbol) => updateAttributes(pre.memberType(sym), annots)))
+ */
+ case _ =>
+ assert(childAnnots forall (_.atp.typeSymbol == MarkerCPSTypes), childAnnots)
+ /*
+ [] + [] = []
+ plus + [] = plus
+ cps + [] = cps
+ plus cps + [] = plus cps
+ minus cps + [] = minus cps
+ synth cps + [] = synth cps // <- synth on left - does it happen?
+
+ [] + cps = cps
+ plus + cps = synth cps
+ cps + cps = cps! <- lin
+ plus cps + cps = synth cps! <- unify
+ minus cps + cps = minus cps! <- lin
+ synth cps + cps = synth cps! <- unify
+ */
+
+ val plus = tpe.hasAnnotation(MarkerCPSAdaptPlus) || (tpe.hasAnnotation(MarkerCPSTypes) &&
+ byName.nonEmpty && byName.forall(_.tpe.hasAnnotation(MarkerCPSAdaptPlus)))
+
+ // move @plus annotations outward from by-name children
+ if (childAnnots.isEmpty) {
+ if (plus) { // @plus or @plus @cps
+ for (t <- byName) {
+ //println("removeAnnotation " + t + " / " + t.tpe)
+ t.setType(removeAttribs(t.tpe, MarkerCPSAdaptPlus, MarkerCPSTypes))
+ }
+ return tpe.withAnnotation(AnnotationInfo(MarkerCPSAdaptPlus.tpe, Nil, Nil))
+ } else
+ return tpe
+ }
+
+ val annots1 = filterAttribs(tpe, MarkerCPSTypes)
+
+ if (annots1.isEmpty) { // nothing or @plus
+ val synth = MarkerCPSSynth.tpe
+ val annots2 = List(linearize(childAnnots))
+ removeAttribs(tpe,MarkerCPSAdaptPlus).withAnnotations(AnnotationInfo(synth, Nil, Nil)::annots2)
+ } else {
+ val annot1 = single(annots1)
+ if (plus) { // @plus @cps
+ val synth = AnnotationInfo(MarkerCPSSynth.tpe, Nil, Nil)
+ val annot2 = linearize(childAnnots)
+ if (!(annot2.atp <:< annot1.atp))
+ throw new TypeError(annot2 + " is not a subtype of " + annot1)
+ val res = removeAttribs(tpe, MarkerCPSAdaptPlus, MarkerCPSTypes).withAnnotations(List(synth, annot2))
+ for (t <- byName) {
+ //println("removeAnnotation " + t + " / " + t.tpe)
+ t.setType(removeAttribs(t.tpe, MarkerCPSAdaptPlus, MarkerCPSTypes))
+ }
+ res
+ } else if (tpe.hasAnnotation(MarkerCPSSynth)) { // @synth @cps
+ val annot2 = linearize(childAnnots)
+ if (!(annot2.atp <:< annot1.atp))
+ throw new TypeError(annot2 + " is not a subtype of " + annot1)
+ removeAttribs(tpe, MarkerCPSTypes).withAnnotation(annot2)
+ } else { // @cps
+ removeAttribs(tpe, MarkerCPSTypes).withAnnotation(linearize(childAnnots:::annots1))
+ }
+ }
+ }
+ }
+
+
+
+
+
+ def transArgList(fun: Tree, args: List[Tree]): List[List[Tree]] = {
+ val formals = fun.tpe.paramTypes
+ val overshoot = args.length - formals.length
+
+ for ((a,tp) <- args.zip(formals ::: List.fill(overshoot)(NoType))) yield {
+ tp match {
+ case TypeRef(_, sym, List(elemtp)) if sym == ByNameParamClass =>
+ Nil // TODO: check conformance??
+ case _ =>
+ List(a)
+ }
+ }
+ }
+
+
+ def transStms(stms: List[Tree]): List[Tree] = stms match {
+ case ValDef(mods, name, tpt, rhs)::xs =>
+ rhs::transStms(xs)
+ case Assign(lhs, rhs)::xs =>
+ rhs::transStms(xs)
+ case x::xs =>
+ x::transStms(xs)
+ case Nil =>
+ Nil
+ }
+
+ def single(xs: List[AnnotationInfo]) = xs match {
+ case List(x) => x
+ case _ =>
+ global.error("not a single cps annotation: " + xs)// FIXME: error message
+ xs(0)
+ }
+
+ def transChildrenInOrder(tree: Tree, tpe: Type, childTrees: List[Tree], byName: List[Tree]) = {
+ val children = childTrees.flatMap { t =>
+ if (t.tpe eq null) Nil else {
+ val types = filterAttribs(t.tpe, MarkerCPSTypes)
+ // TODO: check that it has been adapted and if so correctly
+ if (types.isEmpty) Nil else List(single(types))
+ }
+ }
+
+ val newtpe = updateAttributesFromChildren(tpe, children, byName)
+
+ if (!newtpe.annotations.isEmpty)
+ vprintln("[checker] inferred " + tree + " / " + tpe + " ===> "+ newtpe)
+
+ newtpe
+ }
+
+ /** Modify the type that has thus far been inferred
+ * for a tree. All this should do is add annotations. */
+
+ override def addAnnotations(tree: Tree, tpe: Type): Type = {
+ if (!cpsEnabled) {
+ if (tpe.annotations.nonEmpty && tpe.hasAnnotation(MarkerCPSTypes))
+ global.reporter.error(tree.pos, "this code must be compiled with the Scala continuations plugin enabled")
+ return tpe
+ }
+
+// if (tree.tpe.hasAnnotation(MarkerCPSAdaptPlus))
+// println("addAnnotation " + tree + "/" + tpe)
+
+ tree match {
+
+ case Apply(fun @ Select(qual, name), args) if (fun.tpe ne null) && !fun.tpe.isErroneous =>
+
+ // HACK: With overloaded methods, fun will never get annotated. This is because
+ // the 'overloaded' type gets annotated, but not the alternatives (among which
+ // fun's type is chosen)
+
+ vprintln("[checker] checking select apply " + tree + "/" + tpe)
+
+ transChildrenInOrder(tree, tpe, qual::(transArgList(fun, args).flatten), Nil)
+
+ case TypeApply(fun @ Select(qual, name), args) if (fun.tpe ne null) && !fun.tpe.isErroneous =>
+ vprintln("[checker] checking select apply " + tree + "/" + tpe)
+
+ transChildrenInOrder(tree, tpe, List(qual, fun), Nil)
+
+ case Apply(fun, args) if (fun.tpe ne null) && !fun.tpe.isErroneous =>
+
+ vprintln("[checker] checking unknown apply " + tree + "/" + tpe)
+
+ transChildrenInOrder(tree, tpe, fun::(transArgList(fun, args).flatten), Nil)
+
+ case TypeApply(fun, args) =>
+
+ vprintln("[checker] checking type apply " + tree + "/" + tpe)
+
+ transChildrenInOrder(tree, tpe, List(fun), Nil)
+
+ case Select(qual, name) =>
+
+ vprintln("[checker] checking select " + tree + "/" + tpe)
+
+ // straightforward way is problematic (see select.scala and Test2.scala)
+ // transChildrenInOrder(tree, tpe, List(qual), Nil)
+
+ // the problem is that qual may be of type OverloadedType (or MethodType) and
+ // we cannot safely annotate these. so we just ignore these cases and
+ // clean up later in the Apply/TypeApply trees.
+
+ if (qual.tpe.hasAnnotation(MarkerCPSTypes)) {
+ // however there is one special case:
+ // if it's a method without parameters, just apply it. normally done in adapt, but
+ // we have to do it here so we don't lose the cps information (wouldn't trigger our
+ // adapt and there is no Apply/TypeApply created)
+ tpe match {
+ case PolyType(List(), restpe) =>
+ //println("yep: " + restpe + "," + restpe.getClass)
+ transChildrenInOrder(tree, restpe, List(qual), Nil)
+ case _ : PolyType => tpe
+ case _ : MethodType => tpe
+ case _ : OverloadedType => tpe
+ case _ =>
+ transChildrenInOrder(tree, tpe, List(qual), Nil)
+ }
+ } else
+ tpe
+
+ case If(cond, thenp, elsep) =>
+ transChildrenInOrder(tree, tpe, List(cond), List(thenp, elsep))
+
+ case Match(select, cases) =>
+ // TODO: can there be cases that are not CaseDefs?? check collect vs map!
+ transChildrenInOrder(tree, tpe, List(select), cases:::(cases collect { case CaseDef(_, _, body) => body }))
+
+ case Try(block, catches, finalizer) =>
+ val tpe1 = transChildrenInOrder(tree, tpe, Nil, block::catches:::(catches collect { case CaseDef(_, _, body) => body }))
+
+ val annots = filterAttribs(tpe1, MarkerCPSTypes)
+ if (annots.nonEmpty) {
+ val ann = single(annots)
+ val atp0::atp1::Nil = ann.atp.normalize.typeArgs
+ if (!(atp0 =:= atp1))
+ throw new TypeError("only simple cps types allowed in try/catch blocks (found: " + tpe1 + ")")
+ if (!finalizer.isEmpty) // no finalizers allowed. see explanation in SelectiveCPSTransform
+ reporter.error(tree.pos, "try/catch blocks that use continuations cannot have finalizers")
+ }
+ tpe1
+
+ case Block(stms, expr) =>
+ // if any stm has annotation, so does block
+ transChildrenInOrder(tree, tpe, transStms(stms), List(expr))
+
+ case ValDef(mods, name, tpt, rhs) =>
+ vprintln("[checker] checking valdef " + name + "/"+tpe+"/"+tpt+"/"+tree.symbol.tpe)
+ // ValDef symbols must *not* have annotations!
+ if (hasAnswerTypeAnn(tree.symbol.info)) { // is it okay to modify sym here?
+ vprintln("removing annotation from sym " + tree.symbol + "/" + tree.symbol.tpe + "/" + tpt)
+ tpt.setType(removeAllCPSAnnotations(tpt.tpe))
+ tree.symbol.setInfo(removeAllCPSAnnotations(tree.symbol.info))
+ }
+ tpe
+
+ case _ =>
+ tpe
+ }
+
+
+ }
+ }
+}
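
For orientation, a minimal user-level sketch (illustrative only, not part of this patch) of the kind of tree the checker above has to annotate. Both branches of an If must end up with compatible answer types; addAnnotations folds the branch annotations into the If node, and mismatched branches are rejected later. This assumes the plugin is built and enabled with -P:continuations:enable.

    import scala.util.continuations._

    object IfBranches {
      // Both branches carry an Int @cpsParam[Int, Int] type, so the checker
      // can propagate a single cps annotation onto the If expression.
      def choose(flag: Boolean): Int @cpsParam[Int, Int] =
        if (flag) shift { (k: Int => Int) => k(1) }
        else      shift { (k: Int => Int) => k(2) }

      def main(args: Array[String]): Unit =
        println(reset(choose(true) + 10))  // prints 11
    }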
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
new file mode 100644
index 0000000000..57cba6e829
--- /dev/null
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
@@ -0,0 +1,131 @@
+// $Id$
+
+package scala.tools.selectivecps
+
+import scala.tools.nsc.Global
+
+trait CPSUtils {
+ val global: Global
+ import global._
+ import definitions._
+
+ var cpsEnabled = false
+ val verbose: Boolean = System.getProperty("cpsVerbose", "false") == "true"
+ @inline final def vprintln(x: =>Any): Unit = if (verbose) println(x)
+
+
+ lazy val MarkerCPSSym = definitions.getClass("scala.util.continuations.cpsSym")
+ lazy val MarkerCPSTypes = definitions.getClass("scala.util.continuations.cpsParam")
+ lazy val MarkerCPSSynth = definitions.getClass("scala.util.continuations.cpsSynth")
+
+ lazy val MarkerCPSAdaptPlus = definitions.getClass("scala.util.continuations.cpsPlus")
+ lazy val MarkerCPSAdaptMinus = definitions.getClass("scala.util.continuations.cpsMinus")
+
+
+ lazy val Context = definitions.getClass("scala.util.continuations.ControlContext")
+
+ lazy val ModCPS = definitions.getModule("scala.util.continuations")
+ lazy val MethShiftUnit = definitions.getMember(ModCPS, "shiftUnit")
+ lazy val MethShiftUnitR = definitions.getMember(ModCPS, "shiftUnitR")
+ lazy val MethShift = definitions.getMember(ModCPS, "shift")
+ lazy val MethShiftR = definitions.getMember(ModCPS, "shiftR")
+ lazy val MethReify = definitions.getMember(ModCPS, "reify")
+ lazy val MethReifyR = definitions.getMember(ModCPS, "reifyR")
+
+
+ lazy val allCPSAnnotations = List(MarkerCPSSym, MarkerCPSTypes, MarkerCPSSynth,
+ MarkerCPSAdaptPlus, MarkerCPSAdaptMinus)
+
+ // annotation checker
+
+ def filterAttribs(tpe:Type, cls:Symbol) =
+ tpe.annotations.filter(_.atp.typeSymbol == cls)
+
+ def removeAttribs(tpe:Type, cls:Symbol*) =
+ tpe.withoutAnnotations.withAnnotations(tpe.annotations.filterNot(cls contains _.atp.typeSymbol))
+
+ def removeAllCPSAnnotations(tpe: Type) = removeAttribs(tpe, allCPSAnnotations:_*)
+
+ def linearize(ann: List[AnnotationInfo]): AnnotationInfo = {
+ ann.reduceLeft { (a, b) =>
+ val atp0::atp1::Nil = a.atp.normalize.typeArgs
+ val btp0::btp1::Nil = b.atp.normalize.typeArgs
+ val (u0,v0) = (atp0, atp1)
+ val (u1,v1) = (btp0, btp1)
+/*
+ val (u0,v0) = (a.atp.typeArgs(0), a.atp.typeArgs(1))
+ val (u1,v1) = (b.atp.typeArgs(0), b.atp.typeArgs(1))
+ vprintln("check lin " + a + " andThen " + b)
+*/
+ vprintln("check lin " + a + " andThen " + b)
+ if (!(v1 <:< u0))
+ throw new TypeError("illegal answer type modification: " + a + " andThen " + b)
+ // TODO: improve error message (but it is not very common)
+ AnnotationInfo(appliedType(MarkerCPSTypes.tpe, List(u1,v0)),Nil,Nil)
+ }
+ }
+
+ // anf transform
+
+ def getExternalAnswerTypeAnn(tp: Type) = {
+ tp.annotations.find(a => a.atp.typeSymbol == MarkerCPSTypes) match {
+ case Some(AnnotationInfo(atp, _, _)) =>
+ val atp0::atp1::Nil = atp.normalize.typeArgs
+ Some((atp0, atp1))
+ case None =>
+ if (tp.hasAnnotation(MarkerCPSAdaptPlus))
+ global.warning("trying to instantiate type " + tp + " to unknown cps type")
+ None
+ }
+ }
+
+ def getAnswerTypeAnn(tp: Type) = {
+ tp.annotations.find(a => a.atp.typeSymbol == MarkerCPSTypes) match {
+ case Some(AnnotationInfo(atp, _, _)) =>
+ if (!tp.hasAnnotation(MarkerCPSAdaptPlus)) {//&& !tp.hasAnnotation(MarkerCPSAdaptMinus))
+ val atp0::atp1::Nil = atp.normalize.typeArgs
+ Some((atp0, atp1))
+ } else
+ None
+ case None => None
+ }
+ }
+
+ def hasAnswerTypeAnn(tp: Type) = {
+ tp.hasAnnotation(MarkerCPSTypes) && !tp.hasAnnotation(MarkerCPSAdaptPlus) /*&&
+ !tp.hasAnnotation(MarkerCPSAdaptMinus)*/
+ }
+
+ def hasSynthAnn(tp: Type) = {
+ tp.annotations.exists(a => a.atp.typeSymbol == MarkerCPSSynth)
+ }
+
+ def updateSynthFlag(tree: Tree) = { // remove annotations if *we* added them (@synth present)
+ if (hasSynthAnn(tree.tpe)) {
+ log("removing annotation from " + tree)
+ tree.setType(removeAllCPSAnnotations(tree.tpe))
+ } else
+ tree
+ }
+
+ type CPSInfo = Option[(Type,Type)]
+
+ def linearize(a: CPSInfo, b: CPSInfo)(implicit unit: CompilationUnit, pos: Position): CPSInfo = {
+ (a,b) match {
+ case (Some((u0,v0)), Some((u1,v1))) =>
+ vprintln("check lin " + a + " andThen " + b)
+ if (!(v1 <:< u0)) {
+ unit.error(pos,"cannot change answer type in composition of cps expressions " +
+ "from " + u1 + " to " + v0 + " because " + v1 + " is not a subtype of " + u0 + ".")
+ throw new Exception("check lin " + a + " andThen " + b)
+ }
+ Some((u1,v0))
+ case (Some(_), _) => a
+ case (_, Some(_)) => b
+ case _ => None
+ }
+ }
+
+ // cps transform
+
+} \ No newline at end of file
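
A worked example (illustrative, not part of this patch) of what linearize computes: sequencing a computation annotated @cpsParam[u0, v0] with one annotated @cpsParam[u1, v1] requires v1 <:< u0 and yields @cpsParam[u1, v0], so answer-type modifications compose from the last statement outwards.

    import scala.util.continuations._

    object AnswerTypes {
      // answer type changes from String to Int: Int @cpsParam[String, Int]
      def first: Int @cpsParam[String, Int] =
        shift { (k: Int => String) => k(1).length }

      // answer type changes from Boolean to String: Int @cpsParam[Boolean, String]
      def second: Int @cpsParam[Boolean, String] =
        shift { (k: Int => Boolean) => k(2).toString }

      // linearize(@cpsParam[String, Int], @cpsParam[Boolean, String]):
      // String <:< String holds, so the block gets @cpsParam[Boolean, Int].
      def both: Boolean @cpsParam[Boolean, Int] = { first; second; true }

      def main(args: Array[String]): Unit =
        println(reset(both))  // prints 4 ("true".length)
    }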
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
new file mode 100644
index 0000000000..0525e6fdbc
--- /dev/null
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
@@ -0,0 +1,414 @@
+// $Id$
+
+package scala.tools.selectivecps
+
+import scala.tools.nsc._
+import scala.tools.nsc.transform._
+import scala.tools.nsc.symtab._
+import scala.tools.nsc.plugins._
+
+import scala.tools.nsc.ast._
+
+/**
+ * In methods marked @cps, explicitly name results of calls to other @cps methods
+ */
+abstract class SelectiveANFTransform extends PluginComponent with Transform with
+ TypingTransformers with CPSUtils {
+ // inherits abstract value `global' and class `Phase' from Transform
+
+ import global._ // the global environment
+ import definitions._ // standard classes and methods
+ import typer.atOwner // methods to type trees
+
+ /** the following two members override abstract members in Transform */
+ val phaseName: String = "selectiveanf"
+
+ protected def newTransformer(unit: CompilationUnit): Transformer =
+ new ANFTransformer(unit)
+
+
+ class ANFTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+
+ implicit val _unit = unit // allow code in CPSUtils.scala to report errors
+ var cpsAllowed: Boolean = false // detect cps code in places we do not handle (yet)
+
+ override def transform(tree: Tree): Tree = {
+ if (!cpsEnabled) return tree
+
+ tree match {
+
+ // Maybe we should further generalize the transform and move it over
+ // to the regular Transformer facility. But then, actual and required cps
+ // state would need more complicated (stateful!) tracking.
+
+ // Making the default case use transExpr(tree, None, None) instead of
+ // calling super.transform() would be a start, but at the moment,
+ // this would cause infinite recursion. But we could remove the
+ // ValDef case here.
+
+ case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ log("transforming " + dd.symbol)
+
+ atOwner(dd.symbol) {
+ val rhs1 = transExpr(rhs, None, getExternalAnswerTypeAnn(tpt.tpe))
+
+ log("result "+rhs1)
+ log("result is of type "+rhs1.tpe)
+
+ treeCopy.DefDef(dd, mods, name, transformTypeDefs(tparams), transformValDefss(vparamss),
+ transform(tpt), rhs1)
+ }
+
+ case ff @ Function(vparams, body) =>
+ log("transforming anon function " + ff.symbol)
+
+ atOwner(ff.symbol) {
+
+ //val body1 = transExpr(body, None, getExternalAnswerTypeAnn(body.tpe))
+
+ // need to special case partial functions: if expected type is @cps
+ // but all cases are pure, then we would transform
+ // { x => x match { case A => ... }} to
+ // { x => shiftUnit(x match { case A => ... })}
+ // which Uncurry cannot handle (see function6.scala)
+
+ val ext = getExternalAnswerTypeAnn(body.tpe)
+
+ val body1 = body match {
+ case Match(selector, cases) if (ext.isDefined && getAnswerTypeAnn(body.tpe).isEmpty) =>
+ val cases1 = for {
+ cd @ CaseDef(pat, guard, caseBody) <- cases
+              val caseBody1 = transExpr(caseBody, None, ext)
+ } yield {
+ treeCopy.CaseDef(cd, transform(pat), transform(guard), caseBody1)
+ }
+ treeCopy.Match(tree, transform(selector), cases1)
+
+ case _ =>
+ transExpr(body, None, ext)
+ }
+
+ log("result "+body1)
+ log("result is of type "+body1.tpe)
+
+ treeCopy.Function(ff, transformValDefs(vparams), body1)
+ }
+
+ case vd @ ValDef(mods, name, tpt, rhs) => // object-level valdefs
+ log("transforming valdef " + vd.symbol)
+
+ atOwner(vd.symbol) {
+
+ assert(getExternalAnswerTypeAnn(tpt.tpe) == None)
+
+ val rhs1 = transExpr(rhs, None, None)
+
+ treeCopy.ValDef(vd, mods, name, transform(tpt), rhs1)
+ }
+
+ case TypeTree() =>
+ // circumvent cpsAllowed here
+ super.transform(tree)
+
+ case Apply(_,_) =>
+ // this allows reset { ... } in object constructors
+ // it's kind of a hack to put it here (see note above)
+ transExpr(tree, None, None)
+
+ case _ =>
+
+ if (hasAnswerTypeAnn(tree.tpe)) {
+ if (!cpsAllowed)
+ unit.error(tree.pos, "cps code not allowed here / " + tree.getClass + " / " + tree)
+
+ log(tree)
+ }
+
+ cpsAllowed = false
+ super.transform(tree)
+ }
+ }
+
+
+ def transExpr(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo): Tree = {
+ transTailValue(tree, cpsA, cpsR) match {
+ case (Nil, b) => b
+ case (a, b) =>
+ treeCopy.Block(tree, a,b)
+ }
+ }
+
+
+ def transArgList(fun: Tree, args: List[Tree], cpsA: CPSInfo): (List[List[Tree]], List[Tree], CPSInfo) = {
+ val formals = fun.tpe.paramTypes
+ val overshoot = args.length - formals.length
+
+ var spc: CPSInfo = cpsA
+
+ val (stm,expr) = (for ((a,tp) <- args.zip(formals ::: List.fill(overshoot)(NoType))) yield {
+ tp match {
+ case TypeRef(_, sym, List(elemtp)) if sym == ByNameParamClass =>
+ (Nil, transExpr(a, None, getAnswerTypeAnn(elemtp)))
+ case _ =>
+ val (valStm, valExpr, valSpc) = transInlineValue(a, spc)
+ spc = valSpc
+ (valStm, valExpr)
+ }
+ }).unzip
+
+ (stm,expr,spc)
+ }
+
+
+ def transValue(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo): (List[Tree], Tree, CPSInfo) = {
+ // return value: (stms, expr, spc), where spc is CPSInfo after stms but *before* expr
+ implicit val pos = tree.pos
+ tree match {
+ case Block(stms, expr) =>
+ val (cpsA2, cpsR2) = (cpsA, linearize(cpsA, getAnswerTypeAnn(tree.tpe))) // tbd
+// val (cpsA2, cpsR2) = (None, getAnswerTypeAnn(tree.tpe))
+ val (a, b) = transBlock(stms, expr, cpsA2, cpsR2)
+
+ val tree1 = (treeCopy.Block(tree, a, b)) // no updateSynthFlag here!!!
+
+ (Nil, tree1, cpsA)
+
+ case If(cond, thenp, elsep) =>
+
+ val (condStats, condVal, spc) = transInlineValue(cond, cpsA)
+
+ val (cpsA2, cpsR2) = (spc, linearize(spc, getAnswerTypeAnn(tree.tpe)))
+// val (cpsA2, cpsR2) = (None, getAnswerTypeAnn(tree.tpe))
+ val thenVal = transExpr(thenp, cpsA2, cpsR2)
+ val elseVal = transExpr(elsep, cpsA2, cpsR2)
+
+ // check that then and else parts agree (not necessary any more, but left as sanity check)
+ if (cpsR.isDefined) {
+ if (elsep == EmptyTree)
+ unit.error(tree.pos, "always need else part in cps code")
+ }
+ if (hasAnswerTypeAnn(thenVal.tpe) != hasAnswerTypeAnn(elseVal.tpe)) {
+ unit.error(tree.pos, "then and else parts must both be cps code or neither of them")
+ }
+
+ (condStats, updateSynthFlag(treeCopy.If(tree, condVal, thenVal, elseVal)), spc)
+
+ case Match(selector, cases) =>
+
+ val (selStats, selVal, spc) = transInlineValue(selector, cpsA)
+ val (cpsA2, cpsR2) = (spc, linearize(spc, getAnswerTypeAnn(tree.tpe)))
+// val (cpsA2, cpsR2) = (None, getAnswerTypeAnn(tree.tpe))
+
+ val caseVals = for {
+ cd @ CaseDef(pat, guard, body) <- cases
+ val bodyVal = transExpr(body, cpsA2, cpsR2)
+ } yield {
+ treeCopy.CaseDef(cd, transform(pat), transform(guard), bodyVal)
+ }
+
+ (selStats, updateSynthFlag(treeCopy.Match(tree, selVal, caseVals)), spc)
+
+
+ case ldef @ LabelDef(name, params, rhs) =>
+ if (hasAnswerTypeAnn(tree.tpe)) {
+ val sym = currentOwner.newMethod(tree.pos, name)//unit.fresh.newName(tree.pos, "myloopvar"))
+ .setInfo(ldef.symbol.info)
+ .setFlag(Flags.SYNTHETIC)
+
+ new TreeSymSubstituter(List(ldef.symbol), List(sym)).traverse(rhs)
+ val rhsVal = transExpr(rhs, None, getAnswerTypeAnn(tree.tpe))
+
+ val stm1 = localTyper.typed(DefDef(sym, rhsVal))
+ val expr = localTyper.typed(Apply(Ident(sym), List()))
+
+ (List(stm1), expr, cpsA)
+ } else {
+ val rhsVal = transExpr(rhs, None, None)
+ (Nil, updateSynthFlag(treeCopy.LabelDef(tree, name, params, rhsVal)), cpsA)
+ }
+
+
+ case Try(block, catches, finalizer) =>
+ val blockVal = transExpr(block, cpsA, cpsR)
+
+ val catchVals = for {
+ cd @ CaseDef(pat, guard, body) <- catches
+ val bodyVal = transExpr(body, cpsA, cpsR)
+ } yield {
+ treeCopy.CaseDef(cd, transform(pat), transform(guard), bodyVal)
+ }
+
+ val finallyVal = transExpr(finalizer, None, None) // for now, no cps in finally
+
+ (Nil, updateSynthFlag(treeCopy.Try(tree, blockVal, catchVals, finallyVal)), cpsA)
+
+ case Assign(lhs, rhs) =>
+ // allow cps code in rhs only
+ val (stms, expr, spc) = transInlineValue(rhs, cpsA)
+ (stms, updateSynthFlag(treeCopy.Assign(tree, transform(lhs), expr)), spc)
+
+ case Return(expr0) =>
+ val (stms, expr, spc) = transInlineValue(expr0, cpsA)
+ (stms, updateSynthFlag(treeCopy.Return(tree, expr)), spc)
+
+ case Throw(expr0) =>
+ val (stms, expr, spc) = transInlineValue(expr0, cpsA)
+ (stms, updateSynthFlag(treeCopy.Throw(tree, expr)), spc)
+
+ case Typed(expr0, tpt) =>
+ // TODO: should x: A @cps[B,C] have a special meaning?
+ // type casts used in different ways (see match2.scala, #3199)
+ val (stms, expr, spc) = transInlineValue(expr0, cpsA)
+ val tpt1 = if (treeInfo.isWildcardStarArg(tree)) tpt else
+ treeCopy.TypeTree(tpt).setType(removeAllCPSAnnotations(tpt.tpe))
+// (stms, updateSynthFlag(treeCopy.Typed(tree, expr, tpt1)), spc)
+ (stms, treeCopy.Typed(tree, expr, tpt1).setType(removeAllCPSAnnotations(tree.tpe)), spc)
+
+ case TypeApply(fun, args) =>
+ val (stms, expr, spc) = transInlineValue(fun, cpsA)
+ (stms, updateSynthFlag(treeCopy.TypeApply(tree, expr, args)), spc)
+
+ case Select(qual, name) =>
+ val (stms, expr, spc) = transInlineValue(qual, cpsA)
+ (stms, updateSynthFlag(treeCopy.Select(tree, expr, name)), spc)
+
+ case Apply(fun, args) =>
+ val (funStm, funExpr, funSpc) = transInlineValue(fun, cpsA)
+ val (argStm, argExpr, argSpc) = transArgList(fun, args, funSpc)
+
+ (funStm ::: (argStm.flatten), updateSynthFlag(treeCopy.Apply(tree, funExpr, argExpr)),
+ argSpc)
+
+ case _ =>
+ cpsAllowed = true
+ (Nil, transform(tree), cpsA)
+ }
+ }
+
+ def transTailValue(tree: Tree, cpsA: CPSInfo, cpsR: CPSInfo): (List[Tree], Tree) = {
+
+ val (stms, expr, spc) = transValue(tree, cpsA, cpsR)
+
+ val bot = linearize(spc, getAnswerTypeAnn(expr.tpe))(unit, tree.pos)
+
+ val plainTpe = removeAllCPSAnnotations(expr.tpe)
+
+ if (cpsR.isDefined && !bot.isDefined) {
+
+ if (!expr.isEmpty && (expr.tpe.typeSymbol ne NothingClass)) {
+ // must convert!
+ log("cps type conversion (has: " + cpsA + "/" + spc + "/" + expr.tpe + ")")
+ log("cps type conversion (expected: " + cpsR.get + "): " + expr)
+
+ if (!expr.tpe.hasAnnotation(MarkerCPSAdaptPlus))
+ unit.warning(tree.pos, "expression " + tree + " is cps-transformed unexpectedly")
+
+ try {
+ val Some((a, b)) = cpsR
+
+ val res = localTyper.typed(atPos(tree.pos) {
+ Apply(TypeApply(gen.mkAttributedRef(MethShiftUnit),
+ List(TypeTree(plainTpe), TypeTree(a), TypeTree(b))),
+ List(expr))
+ })
+ return (stms, res)
+
+ } catch {
+ case ex:TypeError =>
+ unit.error(ex.pos, "cannot cps-transform expression " + tree + ": " + ex.msg)
+ }
+ }
+
+ } else if (!cpsR.isDefined && bot.isDefined) {
+ // error!
+ log("cps type error: " + expr)
+ //println("cps type error: " + expr + "/" + expr.tpe + "/" + getAnswerTypeAnn(expr.tpe))
+
+ println(cpsR + "/" + spc + "/" + bot)
+
+ unit.error(tree.pos, "found cps expression in non-cps position")
+ } else {
+ // all is well
+
+ if (expr.tpe.hasAnnotation(MarkerCPSAdaptPlus)) {
+ unit.warning(tree.pos, "expression " + expr + " of type " + expr.tpe + " is not expected to have a cps type")
+ expr.setType(removeAllCPSAnnotations(expr.tpe))
+ }
+
+ // TODO: sanity check that types agree
+ }
+
+ (stms, expr)
+ }
+
+ def transInlineValue(tree: Tree, cpsA: CPSInfo): (List[Tree], Tree, CPSInfo) = {
+
+ val (stms, expr, spc) = transValue(tree, cpsA, None) // never required to be cps
+
+ getAnswerTypeAnn(expr.tpe) match {
+ case spcVal @ Some(_) =>
+
+ val valueTpe = removeAllCPSAnnotations(expr.tpe)
+
+ val sym = currentOwner.newValue(tree.pos, unit.fresh.newName(tree.pos, "tmp"))
+ .setInfo(valueTpe)
+ .setFlag(Flags.SYNTHETIC)
+ .setAnnotations(List(AnnotationInfo(MarkerCPSSym.tpe, Nil, Nil)))
+
+ (stms ::: List(ValDef(sym, expr) setType(NoType)),
+ Ident(sym) setType(valueTpe) setPos(tree.pos), linearize(spc, spcVal)(unit, tree.pos))
+
+ case _ =>
+ (stms, expr, spc)
+ }
+
+ }
+
+
+
+ def transInlineStm(stm: Tree, cpsA: CPSInfo): (List[Tree], CPSInfo) = {
+ stm match {
+
+ // TODO: what about DefDefs?
+ // TODO: relation to top-level val def?
+ // TODO: what about lazy vals?
+
+ case tree @ ValDef(mods, name, tpt, rhs) =>
+ val (stms, anfRhs, spc) = atOwner(tree.symbol) { transValue(rhs, cpsA, None) }
+
+ val tv = new ChangeOwnerTraverser(tree.symbol, currentOwner)
+ stms.foreach(tv.traverse(_))
+
+ // TODO: symbol might already have annotation. Should check conformance
+ // TODO: better yet: do without annotations on symbols
+
+ val spcVal = getAnswerTypeAnn(anfRhs.tpe)
+ if (spcVal.isDefined) {
+ tree.symbol.setAnnotations(List(AnnotationInfo(MarkerCPSSym.tpe, Nil, Nil)))
+ }
+
+ (stms:::List(treeCopy.ValDef(tree, mods, name, tpt, anfRhs)), linearize(spc, spcVal)(unit, tree.pos))
+
+ case _ =>
+ val (headStms, headExpr, headSpc) = transInlineValue(stm, cpsA)
+ val valSpc = getAnswerTypeAnn(headExpr.tpe)
+ (headStms:::List(headExpr), linearize(headSpc, valSpc)(unit, stm.pos))
+ }
+ }
+
+ def transBlock(stms: List[Tree], expr: Tree, cpsA: CPSInfo, cpsR: CPSInfo): (List[Tree], Tree) = {
+ stms match {
+ case Nil =>
+ transTailValue(expr, cpsA, cpsR)
+
+ case stm::rest =>
+ var (rest2, expr2) = (rest, expr)
+ val (headStms, headSpc) = transInlineStm(stm, cpsA)
+ val (restStms, restExpr) = transBlock(rest2, expr2, headSpc, cpsR)
+ (headStms:::restStms, restExpr)
+ }
+ }
+
+
+ }
+}
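
To make the intent of transValue/transInlineValue concrete, a rough before/after sketch (illustrative, not part of this patch): every subexpression whose type carries an answer-type annotation is bound to a fresh synthetic val marked @cpsSym, so that the selectivecps phase can later rewrite the tail of each block.

    import scala.util.continuations._

    object AnfSketch {
      def twice(x: Int): Int @cpsParam[Int, Int] =
        shift { (k: Int => Int) => k(k(x)) }

      // input to selectiveanf:
      def demo: Int @cpsParam[Int, Int] = twice(3) + twice(4)

      // roughly what selectiveanf produces (the names tmp1/tmp2 are
      // illustrative; the real ones come from unit.fresh.newName, and the
      // vals are flagged SYNTHETIC and annotated with @cpsSym):
      //
      //   def demo = {
      //     val tmp1 = twice(3)
      //     val tmp2 = twice(4)
      //     tmp1 + tmp2
      //   }
    }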
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
new file mode 100644
index 0000000000..a16e9b9a4c
--- /dev/null
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
@@ -0,0 +1,60 @@
+// $Id$
+
+package scala.tools.selectivecps
+
+import scala.tools.nsc
+import scala.tools.nsc.typechecker._
+import nsc.Global
+import nsc.Phase
+import nsc.plugins.Plugin
+import nsc.plugins.PluginComponent
+
+class SelectiveCPSPlugin(val global: Global) extends Plugin {
+ import global._
+
+ val name = "continuations"
+ val description = "applies selective cps conversion"
+
+ val anfPhase = new SelectiveANFTransform() {
+ val global = SelectiveCPSPlugin.this.global
+ val runsAfter = List("pickler")
+ }
+
+ val cpsPhase = new SelectiveCPSTransform() {
+ val global = SelectiveCPSPlugin.this.global
+ val runsAfter = List("selectiveanf")
+ }
+
+
+ val components = List[PluginComponent](anfPhase, cpsPhase)
+
+ val checker = new CPSAnnotationChecker {
+ val global: SelectiveCPSPlugin.this.global.type = SelectiveCPSPlugin.this.global
+ }
+ global.addAnnotationChecker(checker.checker)
+
+ global.log("instantiated cps plugin: " + this)
+
+ def setEnabled(flag: Boolean) = {
+ checker.cpsEnabled = flag
+ anfPhase.cpsEnabled = flag
+ cpsPhase.cpsEnabled = flag
+ }
+
+ // TODO: require -enabled command-line flag
+
+ override def processOptions(options: List[String], error: String => Unit) = {
+ var enabled = false
+ for (option <- options) {
+ if (option == "enable") {
+ enabled = true
+ } else {
+ error("Option not understood: "+option)
+ }
+ }
+ setEnabled(enabled)
+ }
+
+ override val optionsHelp: Option[String] =
+ Some(" -P:continuations:enable Enable continuations")
+}
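
Note that the components are registered unconditionally but do nothing until setEnabled(true) is called, which only happens when the enable option is passed. Illustrative usage (jar name and path depend on the build):

    // scalac -Xplugin:<path-to-continuations-plugin.jar> -P:continuations:enable Foo.scala
    //
    // Without -P:continuations:enable, cpsEnabled stays false in the checker and
    // in both transform phases, so trees and types pass through unchanged.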
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
new file mode 100644
index 0000000000..6da56f93d4
--- /dev/null
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
@@ -0,0 +1,384 @@
+// $Id$
+
+package scala.tools.selectivecps
+
+import scala.collection._
+
+import scala.tools.nsc._
+import scala.tools.nsc.transform._
+import scala.tools.nsc.plugins._
+
+import scala.tools.nsc.ast.TreeBrowsers
+import scala.tools.nsc.ast._
+
+/**
+ * In methods marked @cps, CPS-transform assignments introduced by the ANF-transform phase.
+ */
+abstract class SelectiveCPSTransform extends PluginComponent with
+ InfoTransform with TypingTransformers with CPSUtils {
+ // inherits abstract value `global' and class `Phase' from Transform
+
+ import global._ // the global environment
+ import definitions._ // standard classes and methods
+ import typer.atOwner // methods to type trees
+
+ /** the following two members override abstract members in Transform */
+ val phaseName: String = "selectivecps"
+
+ protected def newTransformer(unit: CompilationUnit): Transformer =
+ new CPSTransformer(unit)
+
+ /** This class does not change linearization */
+ override def changesBaseClasses = false
+
+  /** Return the symbol's transformed type.
+   */
+ def transformInfo(sym: Symbol, tp: Type): Type = {
+ if (!cpsEnabled) return tp
+
+ val newtp = transformCPSType(tp)
+
+ if (newtp != tp)
+ log("transformInfo changed type for " + sym + " to " + newtp);
+
+ if (sym == MethReifyR)
+ log("transformInfo (not)changed type for " + sym + " to " + newtp);
+
+ newtp
+ }
+
+ def transformCPSType(tp: Type): Type = { // TODO: use a TypeMap? need to handle more cases?
+ tp match {
+ case PolyType(params,res) => PolyType(params, transformCPSType(res))
+ case MethodType(params,res) =>
+ MethodType(params, transformCPSType(res))
+ case TypeRef(pre, sym, args) => TypeRef(pre, sym, args.map(transformCPSType(_)))
+ case _ =>
+ getExternalAnswerTypeAnn(tp) match {
+ case Some((res, outer)) =>
+ appliedType(Context.tpe, List(removeAllCPSAnnotations(tp), res, outer))
+ case _ =>
+ removeAllCPSAnnotations(tp)
+ }
+ }
+ }
+
+
+ class CPSTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+
+ override def transform(tree: Tree): Tree = {
+ if (!cpsEnabled) return tree
+ postTransform(mainTransform(tree))
+ }
+
+ def postTransform(tree: Tree): Tree = {
+ tree.setType(transformCPSType(tree.tpe))
+ }
+
+
+ def mainTransform(tree: Tree): Tree = {
+ tree match {
+
+ // TODO: can we generalize this?
+
+ case Apply(TypeApply(fun, targs), args)
+ if (fun.symbol == MethShift) =>
+ log("found shift: " + tree)
+ atPos(tree.pos) {
+ val funR = gen.mkAttributedRef(MethShiftR) // TODO: correct?
+ //gen.mkAttributedSelect(gen.mkAttributedSelect(gen.mkAttributedSelect(gen.mkAttributedIdent(ScalaPackage),
+ //ScalaPackage.tpe.member("util")), ScalaPackage.tpe.member("util").tpe.member("continuations")), MethShiftR)
+ //gen.mkAttributedRef(ModCPS.tpe, MethShiftR) // TODO: correct?
+ log(funR.tpe)
+ Apply(
+ TypeApply(funR, targs).setType(appliedType(funR.tpe, targs.map((t:Tree) => t.tpe))),
+ args.map(transform(_))
+ ).setType(transformCPSType(tree.tpe))
+ }
+
+ case Apply(TypeApply(fun, targs), args)
+ if (fun.symbol == MethShiftUnit) =>
+ log("found shiftUnit: " + tree)
+ atPos(tree.pos) {
+ val funR = gen.mkAttributedRef(MethShiftUnitR) // TODO: correct?
+ log(funR.tpe)
+ Apply(
+ TypeApply(funR, List(targs(0), targs(1))).setType(appliedType(funR.tpe,
+ List(targs(0).tpe, targs(1).tpe))),
+ args.map(transform(_))
+ ).setType(appliedType(Context.tpe, List(targs(0).tpe,targs(1).tpe,targs(1).tpe)))
+ }
+
+ case Apply(TypeApply(fun, targs), args)
+ if (fun.symbol == MethReify) =>
+ log("found reify: " + tree)
+ atPos(tree.pos) {
+ val funR = gen.mkAttributedRef(MethReifyR) // TODO: correct?
+ log(funR.tpe)
+ Apply(
+ TypeApply(funR, targs).setType(appliedType(funR.tpe, targs.map((t:Tree) => t.tpe))),
+ args.map(transform(_))
+ ).setType(transformCPSType(tree.tpe))
+ }
+
+ case Try(block, catches, finalizer) =>
+ // currently duplicates the catch block into a partial function.
+ // this is kinda risky, but we don't expect there will be lots
+ // of try/catches inside catch blocks (exp. blowup unlikely).
+
+ // CAVEAT: finalizers are surprisingly tricky!
+ // the problem is that they cannot easily be removed
+ // from the regular control path and hence will
+ // also be invoked after creating the Context object.
+
+ /*
+ object Test {
+ def foo1 = {
+ throw new Exception("in sub")
+ shift((k:Int=>Int) => k(1))
+ 10
+ }
+ def foo2 = {
+ shift((k:Int=>Int) => k(2))
+ 20
+ }
+ def foo3 = {
+ shift((k:Int=>Int) => k(3))
+ throw new Exception("in sub")
+ 30
+ }
+ def foo4 = {
+ shift((k:Int=>Int) => 4)
+ throw new Exception("in sub")
+ 40
+ }
+ def bar(x: Int) = try {
+ if (x == 1)
+ foo1
+ else if (x == 2)
+ foo2
+ else if (x == 3)
+ foo3
+ else //if (x == 4)
+ foo4
+ } catch {
+ case _ =>
+ println("exception")
+ 0
+ } finally {
+ println("done")
+ }
+ }
+
+ reset(Test.bar(1)) // should print: exception,done,0
+ reset(Test.bar(2)) // should print: done,20 <-- but prints: done,done,20
+ reset(Test.bar(3)) // should print: exception,done,0 <-- but prints: done,exception,done,0
+ reset(Test.bar(4)) // should print: 4 <-- but prints: done,4
+ */
+
+ val block1 = transform(block)
+ val catches1 = transformCaseDefs(catches)
+ val finalizer1 = transform(finalizer)
+
+ if (hasAnswerTypeAnn(tree.tpe)) {
+ //vprintln("CPS Transform: " + tree + "/" + tree.tpe + "/" + block1.tpe)
+
+ val (stms, expr1) = block1 match {
+ case Block(stms, expr) => (stms, expr)
+ case expr => (Nil, expr)
+ }
+
+ val targettp = transformCPSType(tree.tpe)
+
+// val expr2 = if (catches.nonEmpty) {
+ val pos = catches.head.pos
+ val argSym = currentOwner.newValueParameter(pos, "$ex").setInfo(ThrowableClass.tpe)
+ val rhs = Match(Ident(argSym), catches1)
+ val fun = Function(List(ValDef(argSym)), rhs)
+ val funSym = currentOwner.newValueParameter(pos, "$catches").setInfo(appliedType(PartialFunctionClass.tpe, List(ThrowableClass.tpe, targettp)))
+ val funDef = localTyper.typed(atPos(pos) { ValDef(funSym, fun) })
+ val expr2 = localTyper.typed(atPos(pos) { Apply(Select(expr1, expr1.tpe.member("flatMapCatch")), List(Ident(funSym))) })
+
+ argSym.owner = fun.symbol
+ val chown = new ChangeOwnerTraverser(currentOwner, fun.symbol)
+ chown.traverse(rhs)
+
+ val exSym = currentOwner.newValueParameter(pos, "$ex").setInfo(ThrowableClass.tpe)
+ val catch2 = { localTyper.typedCases(tree, List(
+ CaseDef(Bind(exSym, Typed(Ident("_"), TypeTree(ThrowableClass.tpe))),
+ Apply(Select(Ident(funSym), "isDefinedAt"), List(Ident(exSym))),
+ Apply(Ident(funSym), List(Ident(exSym))))
+ ), ThrowableClass.tpe, targettp) }
+
+ //typedCases(tree, catches, ThrowableClass.tpe, pt)
+
+ localTyper.typed(Block(List(funDef), treeCopy.Try(tree, treeCopy.Block(block1, stms, expr2), catch2, finalizer1)))
+
+
+/*
+ disabled for now - see notes above
+
+ val expr3 = if (!finalizer.isEmpty) {
+ val pos = finalizer.pos
+ val finalizer2 = duplicateTree(finalizer1)
+ val fun = Function(List(), finalizer2)
+ val expr3 = localTyper.typed(atPos(pos) { Apply(Select(expr2, expr2.tpe.member("mapFinally")), List(fun)) })
+
+ val chown = new ChangeOwnerTraverser(currentOwner, fun.symbol)
+ chown.traverse(finalizer2)
+
+ expr3
+ } else
+ expr2
+*/
+ } else {
+ treeCopy.Try(tree, block1, catches1, finalizer1)
+ }
+
+ case Block(stms, expr) =>
+
+ val (stms1, expr1) = transBlock(stms, expr)
+ treeCopy.Block(tree, stms1, expr1)
+
+ case _ =>
+ super.transform(tree)
+ }
+ }
+
+
+
+ def transBlock(stms: List[Tree], expr: Tree): (List[Tree], Tree) = {
+
+ stms match {
+ case Nil =>
+ (Nil, transform(expr))
+
+ case stm::rest =>
+
+ stm match {
+ case vd @ ValDef(mods, name, tpt, rhs)
+ if (vd.symbol.hasAnnotation(MarkerCPSSym)) =>
+
+ log("found marked ValDef "+name+" of type " + vd.symbol.tpe)
+
+ val tpe = vd.symbol.tpe
+ val rhs1 = atOwner(vd.symbol) { transform(rhs) }
+
+ new ChangeOwnerTraverser(vd.symbol, currentOwner).traverse(rhs1) // TODO: don't traverse twice
+
+ log("valdef symbol " + vd.symbol + " has type " + tpe)
+ log("right hand side " + rhs1 + " has type " + rhs1.tpe)
+
+ log("currentOwner: " + currentOwner)
+ log("currentMethod: " + currentMethod)
+
+ val (bodyStms, bodyExpr) = transBlock(rest, expr)
+ // FIXME: result will later be traversed again by TreeSymSubstituter and
+ // ChangeOwnerTraverser => exp. running time.
+ // Should be changed to fuse traversals into one.
+
+ val specialCaseTrivial = bodyExpr match {
+ case Apply(fun, args) =>
+ // for now, look for explicit tail calls only.
+ // are there other cases that could profit from specializing on
+ // trivial contexts as well?
+ (bodyExpr.tpe.typeSymbol == Context) && (currentMethod == fun.symbol)
+ case _ => false
+ }
+
+ def applyTrivial(ctxValSym: Symbol, body: Tree) = {
+
+ new TreeSymSubstituter(List(vd.symbol), List(ctxValSym)).traverse(body)
+
+ val body2 = localTyper.typed(atPos(vd.symbol.pos) { body })
+
+ // in theory it would be nicer to look for an @cps annotation instead
+ // of testing for Context
+ if ((body2.tpe == null) || !(body2.tpe.typeSymbol == Context)) {
+ //println(body2 + "/" + body2.tpe)
+ unit.error(rhs.pos, "cannot compute type for CPS-transformed function result")
+ }
+ body2
+ }
+
+ def applyCombinatorFun(ctxR: Tree, body: Tree) = {
+ val arg = currentOwner.newValueParameter(ctxR.pos, name).setInfo(tpe)
+ new TreeSymSubstituter(List(vd.symbol), List(arg)).traverse(body)
+ val fun = localTyper.typed(atPos(vd.symbol.pos) { Function(List(ValDef(arg)), body) }) // types body as well
+ arg.owner = fun.symbol
+ new ChangeOwnerTraverser(currentOwner, fun.symbol).traverse(body)
+
+ // see note about multiple traversals above
+
+ log("fun.symbol: "+fun.symbol)
+ log("fun.symbol.owner: "+fun.symbol.owner)
+ log("arg.owner: "+arg.owner)
+
+ log("fun.tpe:"+fun.tpe)
+ log("return type of fun:"+body.tpe)
+
+ var methodName = "map"
+
+ if (body.tpe != null) {
+ if (body.tpe.typeSymbol == Context)
+ methodName = "flatMap"
+ }
+ else
+ unit.error(rhs.pos, "cannot compute type for CPS-transformed function result")
+
+ log("will use method:"+methodName)
+
+ localTyper.typed(atPos(vd.symbol.pos) {
+ Apply(Select(ctxR, ctxR.tpe.member(methodName)), List(fun))
+ })
+ }
+
+ def mkBlock(stms: List[Tree], expr: Tree) = if (stms.nonEmpty) Block(stms, expr) else expr
+
+ try {
+ if (specialCaseTrivial) {
+ log("will optimize possible tail call: " + bodyExpr)
+
+ // FIXME: flatMap impl has become more complicated due to
+ // exceptions. do we need to put a try/catch in the then part??
+
+ // val ctx = <rhs>
+ // if (ctx.isTrivial)
+ // val <lhs> = ctx.getTrivialValue; ... <--- TODO: try/catch ??? don't bother for the moment...
+ // else
+ // ctx.flatMap { <lhs> => ... }
+ val ctxSym = currentOwner.newValue(vd.symbol.name + "$shift").setInfo(rhs1.tpe)
+ val ctxDef = localTyper.typed(ValDef(ctxSym, rhs1))
+ def ctxRef = localTyper.typed(Ident(ctxSym))
+ val argSym = currentOwner.newValue(vd.symbol.name).setInfo(tpe)
+ val argDef = localTyper.typed(ValDef(argSym, Select(ctxRef, ctxRef.tpe.member("getTrivialValue"))))
+ val switchExpr = localTyper.typed(atPos(vd.symbol.pos) {
+ val body2 = duplicateTree(mkBlock(bodyStms, bodyExpr)) // dup before typing!
+ If(Select(ctxRef, ctxSym.tpe.member("isTrivial")),
+ applyTrivial(argSym, mkBlock(argDef::bodyStms, bodyExpr)),
+ applyCombinatorFun(ctxRef, body2))
+ })
+ (List(ctxDef), switchExpr)
+ } else {
+ // ctx.flatMap { <lhs> => ... }
+ // or
+ // ctx.map { <lhs> => ... }
+ (Nil, applyCombinatorFun(rhs1, mkBlock(bodyStms, bodyExpr)))
+ }
+ } catch {
+ case ex:TypeError =>
+ unit.error(ex.pos, ex.msg)
+ (bodyStms, bodyExpr)
+ }
+
+ case _ =>
+ val stm1 = transform(stm)
+ val (a, b) = transBlock(rest, expr)
+ (stm1::a, b)
+ }
+ }
+ }
+
+
+ }
+}
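
The heart of transBlock above rewrites each @cpsSym-marked val into a call on the ControlContext produced by its right-hand side: flatMap when the remaining body is itself cps-typed, map otherwise, with the isTrivial/getTrivialValue branch as a fast path for direct tail calls. Continuing the illustrative sketch from the ANF phase (not part of this patch), the result looks roughly like:

    //   def demo =
    //     twice(3).flatMap { tmp1 =>   // remaining body is still cps => flatMap
    //       twice(4).map { tmp2 =>     // remaining body is pure      => map
    //         tmp1 + tmp2
    //       }
    //     }
    //
    // where twice(3) and twice(4) now have type ControlContext[Int, Int, Int],
    // after transformCPSType has rewritten their @cpsParam-annotated types.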
diff --git a/src/continuations/plugin/scalac-plugin.xml b/src/continuations/plugin/scalac-plugin.xml
new file mode 100644
index 0000000000..04d42655c5
--- /dev/null
+++ b/src/continuations/plugin/scalac-plugin.xml
@@ -0,0 +1,5 @@
+<!-- $Id$ -->
+<plugin>
+ <name>continuations</name>
+ <classname>scala.tools.selectivecps.SelectiveCPSPlugin</classname>
+</plugin>
diff --git a/src/dbc/scala/dbc/datatype/Factory.scala b/src/dbc/scala/dbc/datatype/Factory.scala
index f98c699787..a7640cc642 100644
--- a/src/dbc/scala/dbc/datatype/Factory.scala
+++ b/src/dbc/scala/dbc/datatype/Factory.scala
@@ -22,7 +22,7 @@ object Factory {
final val java_lang_Integer_SIZE = 32;
final val java_lang_Long_SIZE = 64;
- /** Returns a mullable property formated as a boolean option */
+ /** Returns a nullable property formatted as a boolean option */
def isNullable (metadata:java.sql.ResultSetMetaData, index:Int): Option[scala.Boolean] =
metadata.isNullable(index) match {
case java.sql.ResultSetMetaData.columnNoNulls => Some(false);
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java b/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java
index 911acd18da..4fa048177c 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java
@@ -187,6 +187,10 @@ public class JConstantPool {
return addEntry(new Utf8Entry(value));
}
+ public int addUtf8(byte[] value) {
+ return addEntry(new Utf8Entry(value));
+ }
+
public String lookupUtf8(int index) {
Utf8Entry entry = (Utf8Entry)lookupEntry(index);
return entry.getValue();
@@ -344,22 +348,46 @@ public class JConstantPool {
public class Utf8Entry extends ChildlessEntry implements Entry {
private final String value;
- public Utf8Entry(String value) { this.value = value.intern(); }
+ private final byte[] bytes;
+ public Utf8Entry(String value) {
+ this.value = value.intern();
+ this.bytes = null;
+ }
public Utf8Entry(DataInputStream stream) throws IOException {
this(stream.readUTF());
}
+ public Utf8Entry(byte[] bytes) {
+ this.bytes = bytes;
+ this.value = null;
+ }
- public int hashCode() { return value.hashCode(); }
+ public int hashCode() {
+ if (bytes != null) return bytes.hashCode();
+ return value.hashCode();
+ }
public boolean equals(Object o) {
- return o instanceof Utf8Entry && ((Utf8Entry)o).value == value;
+ boolean isEqual = o instanceof Utf8Entry;
+ if (bytes != null) {
+ isEqual = isEqual && ((Utf8Entry)o).bytes == bytes;
+ }
+ else {
+ isEqual = isEqual && ((Utf8Entry)o).value == value;
+ }
+ return isEqual;
}
public int getTag() { return CONSTANT_Utf8; }
public String getValue() { return value; }
+ public byte[] getBytes() { return bytes; }
public int getSize() { return 1; }
public void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeUTF(value);
+ if (bytes != null) {
+ stream.writeShort(bytes.length);
+ stream.write(bytes);
+ }
+ else
+ stream.writeUTF(value);
}
}
diff --git a/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java b/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
index 1f3110580b..a526f75597 100644
--- a/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
+++ b/src/forkjoin/scala/concurrent/forkjoin/RecursiveTask.java
@@ -37,7 +37,7 @@ package scala.concurrent.forkjoin;
public abstract class RecursiveTask<V> extends ForkJoinTask<V> {
/**
- * Empty contructor for use by subclasses.
+ * Empty constructor for use by subclasses.
*/
protected RecursiveTask() {
}
diff --git a/actors.iml b/src/intellij/actors.iml
index b002792a0c..ace6ec7f62 100644
--- a/actors.iml
+++ b/src/intellij/actors.iml
@@ -7,8 +7,8 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/src/actors">
- <sourceFolder url="file://$MODULE_DIR$/src/actors" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../actors">
+ <sourceFolder url="file://$MODULE_DIR$/../actors" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
diff --git a/compiler.iml b/src/intellij/compiler.iml
index 97b08d70cd..cc0b64e735 100644
--- a/compiler.iml
+++ b/src/intellij/compiler.iml
@@ -4,18 +4,17 @@
<facet type="Scala" name="Scala">
<configuration>
<option name="takeFromSettings" value="true" />
- <option name="isRelativeToProjectPath" value="true" />
<option name="myScalaCompilerJarPath" value="build/locker/classes/compiler lib/fjbg.jar lib/msil.jar" />
<option name="myScalaSdkJarPath" value="build/locker/classes/library" />
</configuration>
</facet>
</component>
<component name="NewModuleRootManager" inherit-compiler-output="false">
- <output url="file://$MODULE_DIR$/build/quick/classes/compiler" />
- <output-test url="file://$MODULE_DIR$/out/test/compiler" />
+ <output url="file://$MODULE_DIR$/../../build/quick/classes/compiler" />
+ <output-test url="file://$MODULE_DIR$/../../out/test/compiler" />
<exclude-output />
- <content url="file://$MODULE_DIR$/src/compiler">
- <sourceFolder url="file://$MODULE_DIR$/src/compiler" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../compiler">
+ <sourceFolder url="file://$MODULE_DIR$/../compiler" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
diff --git a/dbc.iml b/src/intellij/dbc.iml
index 9c035853e2..d82bda72b4 100644
--- a/dbc.iml
+++ b/src/intellij/dbc.iml
@@ -7,8 +7,8 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/src/dbc">
- <sourceFolder url="file://$MODULE_DIR$/src/dbc" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../dbc">
+ <sourceFolder url="file://$MODULE_DIR$/../dbc" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
diff --git a/library.iml b/src/intellij/library.iml
index 4d5668de90..a8719634aa 100644
--- a/library.iml
+++ b/src/intellij/library.iml
@@ -7,11 +7,11 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/META-INF" />
- <content url="file://$MODULE_DIR$/bin" />
- <content url="file://$MODULE_DIR$/lib" />
- <content url="file://$MODULE_DIR$/src/library">
- <sourceFolder url="file://$MODULE_DIR$/src/library" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../../META-INF" />
+ <content url="file://$MODULE_DIR$/../../bin" />
+ <content url="file://$MODULE_DIR$/../../lib" />
+ <content url="file://$MODULE_DIR$/../library">
+ <sourceFolder url="file://$MODULE_DIR$/../library" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
diff --git a/manual.iml b/src/intellij/manual.iml
index 0ee975326b..bbdcfbebc2 100644
--- a/manual.iml
+++ b/src/intellij/manual.iml
@@ -7,8 +7,8 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/src/manual">
- <sourceFolder url="file://$MODULE_DIR$/src/manual" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../manual">
+ <sourceFolder url="file://$MODULE_DIR$/../manual" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
diff --git a/partest.iml b/src/intellij/partest.iml
index addac88198..3df457abdb 100644
--- a/partest.iml
+++ b/src/intellij/partest.iml
@@ -7,10 +7,10 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/src/partest">
- <sourceFolder url="file://$MODULE_DIR$/src/partest" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../partest">
+ <sourceFolder url="file://$MODULE_DIR$/../partest" isTestSource="false" />
</content>
- <content url="file://$MODULE_DIR$/test" />
+ <content url="file://$MODULE_DIR$/../../test" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="module" module-name="actors" />
diff --git a/src/intellij/scala-lang.ipr b/src/intellij/scala-lang.ipr
new file mode 100644
index 0000000000..2506c74f6a
--- /dev/null
+++ b/src/intellij/scala-lang.ipr
@@ -0,0 +1,1446 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+ <component name="AntConfiguration">
+ <defaultAnt bundledAnt="true" />
+ <buildFile url="file://$PROJECT_DIR$/../../build.xml">
+ <additionalClassPath />
+ <antReference projectDefault="true" />
+ <customJdkName value="" />
+ <maximumHeapSize value="128" />
+ <maximumStackSize value="32" />
+ <properties />
+ </buildFile>
+ </component>
+ <component name="BuildJarProjectSettings">
+ <option name="BUILD_JARS_ON_MAKE" value="false" />
+ </component>
+ <component name="CodeStyleSettingsManager">
+ <option name="PER_PROJECT_SETTINGS">
+ <value>
+ <ADDITIONAL_INDENT_OPTIONS fileType="java">
+ <option name="INDENT_SIZE" value="4" />
+ <option name="CONTINUATION_INDENT_SIZE" value="8" />
+ <option name="TAB_SIZE" value="4" />
+ <option name="USE_TAB_CHARACTER" value="false" />
+ <option name="SMART_TABS" value="false" />
+ <option name="LABEL_INDENT_SIZE" value="0" />
+ <option name="LABEL_INDENT_ABSOLUTE" value="false" />
+ </ADDITIONAL_INDENT_OPTIONS>
+ <ADDITIONAL_INDENT_OPTIONS fileType="jsp">
+ <option name="INDENT_SIZE" value="4" />
+ <option name="CONTINUATION_INDENT_SIZE" value="8" />
+ <option name="TAB_SIZE" value="4" />
+ <option name="USE_TAB_CHARACTER" value="false" />
+ <option name="SMART_TABS" value="false" />
+ <option name="LABEL_INDENT_SIZE" value="0" />
+ <option name="LABEL_INDENT_ABSOLUTE" value="false" />
+ </ADDITIONAL_INDENT_OPTIONS>
+ <ADDITIONAL_INDENT_OPTIONS fileType="xml">
+ <option name="INDENT_SIZE" value="4" />
+ <option name="CONTINUATION_INDENT_SIZE" value="8" />
+ <option name="TAB_SIZE" value="4" />
+ <option name="USE_TAB_CHARACTER" value="false" />
+ <option name="SMART_TABS" value="false" />
+ <option name="LABEL_INDENT_SIZE" value="0" />
+ <option name="LABEL_INDENT_ABSOLUTE" value="false" />
+ </ADDITIONAL_INDENT_OPTIONS>
+ </value>
+ </option>
+ </component>
+ <component name="CompilerAPISettings">
+ <option name="DEBUGGING_INFO" value="true" />
+ <option name="GENERATE_NO_WARNINGS" value="false" />
+ <option name="DEPRECATION" value="true" />
+ <option name="ADDITIONAL_OPTIONS_STRING" value="" />
+ <option name="MAXIMUM_HEAP_SIZE" value="128" />
+ </component>
+ <component name="CompilerConfiguration">
+ <option name="DEFAULT_COMPILER" value="Javac" />
+ <resourceExtensions>
+ <entry name=".+\.(properties|xml|html|dtd|tld)" />
+ <entry name=".+\.(gif|png|jpeg|jpg)" />
+ </resourceExtensions>
+ <wildcardResourcePatterns>
+ <entry name="?*.properties" />
+ <entry name="?*.xml" />
+ <entry name="?*.gif" />
+ <entry name="?*.png" />
+ <entry name="?*.jpeg" />
+ <entry name="?*.jpg" />
+ <entry name="?*.html" />
+ <entry name="?*.dtd" />
+ <entry name="?*.tld" />
+ <entry name="?*.ftl" />
+ </wildcardResourcePatterns>
+ <annotationProcessing enabled="false" useClasspath="true" />
+ </component>
+ <component name="CopyrightManager" default="">
+ <module2copyright />
+ </component>
+ <component name="DependencyValidationManager">
+ <option name="SKIP_IMPORT_STATEMENTS" value="false" />
+ </component>
+ <component name="EclipseCompilerSettings">
+ <option name="DEBUGGING_INFO" value="true" />
+ <option name="GENERATE_NO_WARNINGS" value="true" />
+ <option name="DEPRECATION" value="false" />
+ <option name="ADDITIONAL_OPTIONS_STRING" value="" />
+ <option name="MAXIMUM_HEAP_SIZE" value="128" />
+ </component>
+ <component name="EclipseEmbeddedCompilerSettings">
+ <option name="DEBUGGING_INFO" value="true" />
+ <option name="GENERATE_NO_WARNINGS" value="true" />
+ <option name="DEPRECATION" value="false" />
+ <option name="ADDITIONAL_OPTIONS_STRING" value="" />
+ <option name="MAXIMUM_HEAP_SIZE" value="128" />
+ </component>
+ <component name="Encoding" useUTFGuessing="true" native2AsciiForPropertiesFiles="false" />
+ <component name="FacetAutodetectingManager">
+ <autodetection-disabled>
+ <facet-type id="Scala">
+ <modules>
+ <module name="files">
+ <files>
+ <file url="file://$PROJECT_DIR$/../../test/files/android/HelloAndroid.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/ant/fsc.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/ant/scalac.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/ant/scaladoc.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/cldc/randoms.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/cli/test1/Main.check.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/cli/test1/Main.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/cli/test2/Main.check.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/cli/test2/Main.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/cli/test3/Main.check.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/cli/test3/Main.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/JavaInteraction.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/bigints.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/bug560bis.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/inner.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/manifests.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/methvsfield.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/natives.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/nest.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/protectedacc.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/serialization.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/t0632.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/t1116.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/t1143-2/t1143-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/t1143.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/typerep.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/unittest_io.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/unittest_xml.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/xml01.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/xml02.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/xml03syntax.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/xml04embed.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/xmlattr.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/xmlmore.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/xmlpull.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm/xmlstuff.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/annotations.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/bug676.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/bug680.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/console.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/genericNest.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/interpreter.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/outerEnum.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/stringbuilder.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/sync-var.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/t0014.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/t1461.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/t1464/MyTrait.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/throws-annot.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/jvm5/typerep.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/abstract.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/accesses.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/annot-nonconst.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/badtok-1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/badtok-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/badtok-3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1010.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1011.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1017.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1041.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1106.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1112.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug112706A.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1181.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1183.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1224.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1241.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1275.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1392.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1523.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1623.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug1838.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug200.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug276.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug278.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug284.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug343.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug391.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug409.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug412.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug414.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug418.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug421.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug452.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug473.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug500.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug501.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug510.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug512.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug515.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug520.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug521.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug545.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug550.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug555.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug556.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug558.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug562.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug563.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug565.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug576.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug585.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug588.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug591.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug593.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug608.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug630.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug631.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug633.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug639.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug649.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug650.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug663.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug664.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug667.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug668.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug677.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug691.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug692.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug693.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug696.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug700.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug708.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug712.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug715.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug729.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug752.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug765.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug766.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug779.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug783.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug798.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug800.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug835.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug836.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug845.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug846.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug856.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug875.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug876.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug877.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug882.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug900.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug908.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug909.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug910.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug935.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug944.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug960.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug961.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug987.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/bug997.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/checksensible.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/constrs.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/cyclics.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/divergent-implicit.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/faculty.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/forward.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/gadts1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/higherkind_novalue.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/imp2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/implicits.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/lazy-override.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/lazyvals.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/lubs.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/mixins.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/multi-array.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/nopredefs.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/null-unsoundness.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/overload.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/parstar.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/patmatexhaust.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/patternalts.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-after-terminal/src/ThePlugin.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-after-terminal/testsource.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-before-parser/src/ThePlugin.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-before-parser/testsource.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-cyclic-dependency/src/ThePlugin.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-cyclic-dependency/testsource.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-multiple-rafter/src/ThePlugin.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-multiple-rafter/testsource.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-rafter-before-1/src/ThePlugin.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-rafter-before-1/testsource.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-rightafter-terminal/src/ThePlugin.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/plugin-rightafter-terminal/testsource.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/sabin2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/saito.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/sensitive.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/structural.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/switch.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0003.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0015.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0117.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0152.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0204.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0207.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0209.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0214.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0218.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0226.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0259.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0345.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0351.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0503.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0528neg.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0590.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0606.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0673/Test.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0699/A.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0699/B.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0764.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0842.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0899.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t0903.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t1009.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t1033.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t1049.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t1163.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t1168.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t1215.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t1371.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/t1659.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/tailrec.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/tcpoly_bounds.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/tcpoly_override.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/tcpoly_typealias.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/tcpoly_variance.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/tcpoly_variance_enforce.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/typeerror.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/unreachablechar.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/variances.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/viewtest.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/volatile-intersection.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/volatile.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/wellkinded_app.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/wellkinded_app2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/wellkinded_bounds.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/wellkinded_wrongarity.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/wellkinded_wrongarity2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/xmlcorner.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/xmltruncated1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/xmltruncated2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/xmltruncated3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/xmltruncated4.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/xmltruncated5.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/neg/xmltruncated6.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/A.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/List1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/MailBox.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/NoCyclicReference.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/S1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/S3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/S5.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/S8.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/Transactions.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/X.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/Z.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/abstract.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/aliases.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/annot-inner.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/annotations.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/arrays2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/attributes.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bounds.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0002.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0017.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0020.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0029.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0030.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0031.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0032.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0036.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0039.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0049.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0053.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0054.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0061.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0064.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0066.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0068.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0069.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0076.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0081.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0082.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0085.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0091.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0093.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0123.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0204.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0304.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0325.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0422.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0599.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug0646.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1000.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1001.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1006.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1014.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1034.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1049.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1050.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1056.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1070.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1075.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1085.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1087.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1090.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1107.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1119.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1123.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug112606A.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1136.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug115.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug116.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1168.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1185.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug119.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1203.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug121.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1210.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1210a.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug122.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1237.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug124.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1241.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1279a.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1292.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1385.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug151.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1565.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug159.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug160.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug175.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug177.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug183.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug1858.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug201.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug210.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug211.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug229.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug245.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug247.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug262.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug267.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug284.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug287.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug289.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug295.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug296.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug304.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug318.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug319.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug342.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug344.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug348plus.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug359.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug360.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug361.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug372.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug374.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug389.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug397.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug402.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug404.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug415.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug419.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug422.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug430-feb09.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug430.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug432.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug439.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug443.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug460.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug514.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug516.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug522.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug530.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug531.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug532.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug533.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug566.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug577.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug592.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug595.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug596.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug599.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug602.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug604.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug607.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug611.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug613.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug615.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug616.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug628.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug640.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug651.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug661.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug675.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug684.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug690.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug694.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug697.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug698.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug703.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug704.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug711.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug720.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug756.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug757.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug757a.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug758.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug759.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug762.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug767.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug780.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug788.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug789.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug796.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug802.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug803.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug805.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug807.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug812.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug839.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug851.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug873.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug880.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug892.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug911.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug927.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/bug946.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/builders.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/caseaccs.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/cfcrash.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/channels.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/cls.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/cls1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/clsrefine.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/code.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/collections.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/comp-rec-test.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/compile.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/compile1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/compound.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/constfold.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/contrib467.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/contrib701.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/cyclics.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/escapes2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/eta.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/exceptions.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/functions.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/gadt-gilles.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/gadts2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/gosh.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/gui.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/homonym.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/imp2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/imports.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/infer.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/infer2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/inferbroadtype.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/init.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/itay.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/jesper.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/kinzer.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/ksbug1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/lambda.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/lambdalift.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/lambdalift1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/largecasetest.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/listpattern.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/localmodules.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/looping-jsig.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/manifest1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/matchStarlift.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/matthias1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/matthias3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/matthias4.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/matthias5.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/maxim1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/michel1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/michel2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/michel3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/michel4.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/michel5.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/michel6.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/mixins.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/modules.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/modules1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/moduletrans.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/needstypeearly.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/nested.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/nested2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/null.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/nullary.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/nullary_poly.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/override.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/partialfun.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/pat_gilles.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/pat_iuli.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/patterns.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/patterns1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/patterns1213.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/patterns2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/patterns3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/philippe1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/philippe2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/philippe3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/philippe4.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/pmbug.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/proj-rec-test.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/propagate.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/protected-t1010.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/rebind.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/return_thistype.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/scoping1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/scoping2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/scoping3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/seqtest2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/simplelists.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/stable.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/strings.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/sudoku.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0055.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0154.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0165.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0227.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0231.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0273.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0288/Foo.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0301.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0438.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0453.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0504.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0586.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0591.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0651.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0654.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0674.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0710.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0770.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0774/deathname.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0774/unrelated.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0786.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0851.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0872.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0904.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0905.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t0999.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1001.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1027.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1049.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1059.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1087.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1107/O.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1107/T.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1131.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1146.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1147.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1159.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1164.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1280.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1380/hallo.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1391.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1409/ConcreteImpl.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1438.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1439.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1480.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1648.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1675.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1761.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1789.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/t1840/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_boundedmonad.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_bounds1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_checkkinds_mix.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_gm.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_higherorder_bound_method.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_late_method_params.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_method.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_overloaded.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_param_scoping.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_poly.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_return_overriding.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_seq.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_seq_typealias.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_subst.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_typeapp.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_typesub.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_variance.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tcpoly_wildcards.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/ted.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/test1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/test2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/test4.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/test4a.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/test4refine.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/test5.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/test5refine.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/testcast.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/thistype.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/thistypes.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/ticket0137.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tinondefcons.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/traits.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/tryexpr.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/typealias_dubious.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/typealiases.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/typerep-stephane.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/typerep.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/typesafecons.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/unapply.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/unapplyComplex.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/unapplyContexts2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/unapplyGeneric.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/unapplyNeedsMemberType.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/unapplySeq.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/unapplyVal.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/unicode-decode.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/valdefs.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/variances.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/viewtest1.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos/viewtest2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/chang/Test.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/ilya/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/ilya2/A.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/signatures/sig.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t0695/Test.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1101/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1102/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1150/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1152/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1176/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1196/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1197/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1203/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1230/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1231/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1232/S.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1263/test.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/pos5/t1711/Seq.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug597/Main.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug597/Test.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug687/QueryA.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug687/QueryB.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug722/IfElse.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug722/Parser.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug722/ScanBased.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug735/ScalaExpressions.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug735/ScalaTyper.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug743/BracesXXX.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug743/ParserXXX.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug785/ScalaNewTyper.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug785/ScalaTrees.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug831/NewScalaParserXXX.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/res/bug831/NewScalaTestXXX.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-01.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-02.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-03.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-04.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-05.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-06.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-07.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-08.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-09.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-10.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/Course-2002-13.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/NestedClasses.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/absoverride.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/amp.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/arrays.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/arybufgrow.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bitsets.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/boolexprs.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/boolord.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bridges.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug0325.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug1074.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug1192.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug1220.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug216.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug405.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug428.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug429.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug594.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug601.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug603.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug627.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug629.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug657.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug744.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug889.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug920.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug949.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bug978.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/bugs.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/byname.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/caseclasses.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/checked.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/classof.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/collection-stacks.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/collections.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/colltest.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/complicatedmatch.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/constrained-types.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/constructors.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/contrib674.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/ctor-order.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/deeps.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/docgenerator.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/enums.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/exceptions-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/exceptions.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/existentials.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/exoticnames.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/fors.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/forvaleq.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/gadts.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/impconvtimes.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/implicits.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/imports.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/infiniteloop.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/infix.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/intmap.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/iq.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/issue192.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/iterables.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/iterators.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/json.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/jtptest.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/lazy-exprs.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/lazy-locals.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/lazy-override.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/lazy-traits.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/lisp.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/lists.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/literals.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/map_test.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/matcharraytail.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/matchbytes.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/matchemptyarray.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/matchintasany.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/matchonstream.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/misc.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/missingparams.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/mixins.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/multi-array.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/overloads.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/patmatnew.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/promotion.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/randomAccessSeq-apply.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/range.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/retclosure.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/retsynch.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/richs.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/runtime-richChar.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/runtime.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/slices.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/sort.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/streams.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/structural.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/syncchannel.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0005.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0017.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0042.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0091.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0412.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0421.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0485.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0486.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0508.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0528.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0607.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0631.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0663.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0668.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0677.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0700.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0807.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0883.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0911.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t0936.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1323.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1368.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1423.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1500.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1501.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1505.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1524.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1535.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1618.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1620.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1718.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1747.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/t1829.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/tailcalls.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/tcpoly_monads.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/tcpoly_overriding.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/tcpoly_parseridioms.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/try-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/try.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/tuples.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/typealias_overriding.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/unapply.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/unapplyArray.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/unboxingBug.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/unittest_collection.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/unittest_io.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/verify-ctor.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/run/withIndex.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/scalacheck/list.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/script/fact.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/script/second.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/script/t1015.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/script/t1017.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/script/utf8.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/ackermann.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/ary.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/binarytrees.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/binarytrees.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/binarytrees.scala-3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/chameneos.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/except.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/fannkuch.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/fannkuch.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/fibo.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/harmonic.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/hash.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/hash2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/hello.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/knucleotide.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/knucleotide.scala-3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/lists.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/message.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/nbody.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/nestedloop.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/nsieve.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/nsieve.scala-3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/nsievebits.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/nsievebits.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/nsievebits.scala-3.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/objinst.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/partialsums.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/partialsums.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/recursive.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/regexdna.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/spectralnorm.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/strcat.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/sumcol.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/sumcol.scala-2.scala" />
+ <file url="file://$PROJECT_DIR$/../../test/files/shootout/takfp.scala" />
+ </files>
+ </module>
+ <module name="library">
+ <files>
+ <file url="file://$PROJECT_DIR$/../android-library/scala/ScalaObject.scala" />
+ <file url="file://$PROJECT_DIR$/../android-library/scala/reflect/ScalaBeanInfo.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/Application.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/BigDecimal.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/BigInt.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/Console.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/Math.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/Numeric.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/Ordering.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/Predef.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/Range.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/Symbol.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/collection/JavaConversions.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/collection/immutable/List.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/collection/immutable/PagedSeq.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/collection/mutable/OpenHashMap.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/collection/mutable/StringBuilder.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/collection/mutable/WeakHashMap.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/compat/Platform.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/concurrent/DelayedLazyVal.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/concurrent/jolib.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/concurrent/ops.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/concurrent/pilib.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/io/BufferedSource.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/io/BytePickle.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/io/Codec.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/io/File.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/io/Position.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/io/Source.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/io/UTF8Codec.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/mobile/Code.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/mobile/Location.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/net/Utility.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/ref/PhantomReference.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/ref/ReferenceQueue.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/ref/ReferenceWrapper.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/ref/SoftReference.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/ref/WeakReference.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/reflect/Invocation.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/reflect/Manifest.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/reflect/ScalaBeanInfo.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/MethodCache.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/NonLocalReturnControl.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/RichChar.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/RichClass.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/RichDouble.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/RichException.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/RichFloat.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/RichInt.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/RichLong.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/RichString.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/runtime/StringAdd.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/testing/Benchmark.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/testing/SUnit.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/testing/Show.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/text/Document.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/ClassLoader.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/DynamicVariable.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/Marshal.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/NameTransformer.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/Properties.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/Random.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/automata/BaseBerrySethi.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/automata/DetWordAutom.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/automata/Inclusion.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/automata/NondetWordAutom.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/automata/SubsetConstruction.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/automata/WordBerrySethi.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/control/Exception.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/grammar/HedgeRHS.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/grammar/TreeRHS.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/logging/ConsoleLogger.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/logging/Logged.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/matching/Regex.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/ast/AbstractSyntax.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/ast/Binders.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/ImplicitConversions.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/JavaTokenParsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/PackratParsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/Parsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/RegexParsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/lexical/Lexical.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/lexical/Scanners.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/lexical/StdLexical.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/syntactical/TokenParsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/testing/RegexTest.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinator/testing/Tester.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/$tilde.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/ImplicitConversions.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/Parsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/lexical/Lexical.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/lexical/Scanners.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/lexical/StdLexical.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/syntactical/BindingParsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/syntactical/StdTokenParsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/syntactical/TokenParsers.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/combinatorold/testing/Tester.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/CharArrayPosition.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/CharArrayReader.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/CharSequenceReader.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/NoPosition.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/OffsetPosition.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/PagedSeqReader.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/Position.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/Positional.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/Reader.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/input/StreamReader.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/json/JSON.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/json/Lexer.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/json/Parser.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/syntax/StdTokens.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/parsing/syntax/Tokens.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/regexp/Base.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/regexp/PointedHedgeExp.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/regexp/SyntaxError.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/util/regexp/WordExp.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Atom.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Attribute.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Comment.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Document.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Elem.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/EntityRef.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Group.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/HasKeyValue.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/MalformedAttributeException.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/MetaData.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/NamespaceBinding.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Node.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/NodeBuffer.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/NodeSeq.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/NodeTraverser.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Null.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/PCData.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Parsing.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/PrefixedAttribute.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/PrettyPrinter.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/ProcInstr.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/QNode.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/SpecialNode.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Text.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/TextBuffer.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/TopScope.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/TypeSymbol.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Unparsed.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/UnprefixedAttribute.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Utility.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/XML.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/Xhtml.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/ContentModel.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/ContentModelParser.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/DTD.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/Decl.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/DocType.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/DtdTypeSymbol.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/ElementValidator.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/ExternalID.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/Scanner.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/Tokens.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/dtd/ValidationException.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/factory/Binder.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/factory/LoggedNodeFactory.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/factory/NodeFactory.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/factory/XMLLoader.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/include/CircularIncludeException.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/include/UnavailableResourceException.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/include/XIncludeException.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/include/sax/EncodingHeuristics.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/include/sax/Main.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/include/sax/XIncludeFilter.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/include/sax/XIncluder.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/ConstructingHandler.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/ConstructingParser.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/DefaultMarkupHandler.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/ExternalSources.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/FactoryAdapter.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/FatalError.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/MarkupHandler.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/MarkupParser.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/NoBindingFactoryAdapter.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/TokenTests.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/ValidatingMarkupHandler.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/XhtmlEntities.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/parsing/XhtmlParser.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/path/Expression.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/persistent/CachedFileStorage.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/persistent/Index.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/persistent/IndexedStorage.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/persistent/SetStorage.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/pull/XMLEvent.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/pull/XMLEventReader.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/transform/BasicTransformer.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/transform/RewriteRule.scala" />
+ <file url="file://$PROJECT_DIR$/../dotnet-library/scala/xml/transform/RuleTransformer.scala" />
+ <file url="file://$PROJECT_DIR$/../scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala" />
+ </files>
+ </module>
+ </modules>
+ </facet-type>
+ </autodetection-disabled>
+ </component>
+ <component name="IdProvider" IDEtalkID="522B472C2EA573563CC2BA93160270BE" />
+ <component name="InspectionProjectProfileManager">
+ <list size="5">
+ <item index="0" class="java.lang.String" itemvalue="TYPO" />
+ <item index="1" class="java.lang.String" itemvalue="SERVER PROBLEM" />
+ <item index="2" class="java.lang.String" itemvalue="INFO" />
+ <item index="3" class="java.lang.String" itemvalue="WARNING" />
+ <item index="4" class="java.lang.String" itemvalue="ERROR" />
+ </list>
+ </component>
+ <component name="JavacSettings">
+ <option name="DEBUGGING_INFO" value="true" />
+ <option name="GENERATE_NO_WARNINGS" value="false" />
+ <option name="DEPRECATION" value="true" />
+ <option name="ADDITIONAL_OPTIONS_STRING" value="" />
+ <option name="MAXIMUM_HEAP_SIZE" value="128" />
+ </component>
+ <component name="JavadocGenerationManager">
+ <option name="OUTPUT_DIRECTORY" />
+ <option name="OPTION_SCOPE" value="protected" />
+ <option name="OPTION_HIERARCHY" value="true" />
+ <option name="OPTION_NAVIGATOR" value="true" />
+ <option name="OPTION_INDEX" value="true" />
+ <option name="OPTION_SEPARATE_INDEX" value="true" />
+ <option name="OPTION_DOCUMENT_TAG_USE" value="false" />
+ <option name="OPTION_DOCUMENT_TAG_AUTHOR" value="false" />
+ <option name="OPTION_DOCUMENT_TAG_VERSION" value="false" />
+ <option name="OPTION_DOCUMENT_TAG_DEPRECATED" value="true" />
+ <option name="OPTION_DEPRECATED_LIST" value="true" />
+ <option name="OTHER_OPTIONS" value="" />
+ <option name="HEAP_SIZE" />
+ <option name="LOCALE" />
+ <option name="OPEN_IN_BROWSER" value="true" />
+ </component>
+ <component name="JikesSettings">
+ <option name="JIKES_PATH" value="" />
+ <option name="DEBUGGING_INFO" value="true" />
+ <option name="DEPRECATION" value="true" />
+ <option name="GENERATE_NO_WARNINGS" value="false" />
+ <option name="IS_EMACS_ERRORS_MODE" value="true" />
+ <option name="ADDITIONAL_OPTIONS_STRING" value="" />
+ </component>
+ <component name="Palette2">
+ <group name="Swing">
+ <item class="com.intellij.uiDesigner.HSpacer" tooltip-text="Horizontal Spacer" icon="/com/intellij/uiDesigner/icons/hspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="1" hsize-policy="6" anchor="0" fill="1" />
+ </item>
+ <item class="com.intellij.uiDesigner.VSpacer" tooltip-text="Vertical Spacer" icon="/com/intellij/uiDesigner/icons/vspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="6" hsize-policy="1" anchor="0" fill="2" />
+ </item>
+ <item class="javax.swing.JPanel" icon="/com/intellij/uiDesigner/icons/panel.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3" />
+ </item>
+ <item class="javax.swing.JScrollPane" icon="/com/intellij/uiDesigner/icons/scrollPane.png" removable="false" auto-create-binding="false" can-attach-label="true">
+ <default-constraints vsize-policy="7" hsize-policy="7" anchor="0" fill="3" />
+ </item>
+ <item class="javax.swing.JButton" icon="/com/intellij/uiDesigner/icons/button.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="3" anchor="0" fill="1" />
+ <initial-values>
+ <property name="text" value="Button" />
+ </initial-values>
+ </item>
+ <item class="javax.swing.JRadioButton" icon="/com/intellij/uiDesigner/icons/radioButton.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
+ <initial-values>
+ <property name="text" value="RadioButton" />
+ </initial-values>
+ </item>
+ <item class="javax.swing.JCheckBox" icon="/com/intellij/uiDesigner/icons/checkBox.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
+ <initial-values>
+ <property name="text" value="CheckBox" />
+ </initial-values>
+ </item>
+ <item class="javax.swing.JLabel" icon="/com/intellij/uiDesigner/icons/label.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="0" anchor="8" fill="0" />
+ <initial-values>
+ <property name="text" value="Label" />
+ </initial-values>
+ </item>
+ <item class="javax.swing.JTextField" icon="/com/intellij/uiDesigner/icons/textField.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
+ <preferred-size width="150" height="-1" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JPasswordField" icon="/com/intellij/uiDesigner/icons/passwordField.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
+ <preferred-size width="150" height="-1" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JFormattedTextField" icon="/com/intellij/uiDesigner/icons/formattedTextField.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
+ <preferred-size width="150" height="-1" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JTextArea" icon="/com/intellij/uiDesigner/icons/textArea.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+ <preferred-size width="150" height="50" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JTextPane" icon="/com/intellij/uiDesigner/icons/textPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+ <preferred-size width="150" height="50" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JEditorPane" icon="/com/intellij/uiDesigner/icons/editorPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+ <preferred-size width="150" height="50" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JComboBox" icon="/com/intellij/uiDesigner/icons/comboBox.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="0" hsize-policy="2" anchor="8" fill="1" />
+ </item>
+ <item class="javax.swing.JTable" icon="/com/intellij/uiDesigner/icons/table.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+ <preferred-size width="150" height="50" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JList" icon="/com/intellij/uiDesigner/icons/list.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="6" hsize-policy="2" anchor="0" fill="3">
+ <preferred-size width="150" height="50" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JTree" icon="/com/intellij/uiDesigner/icons/tree.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
+ <preferred-size width="150" height="50" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JTabbedPane" icon="/com/intellij/uiDesigner/icons/tabbedPane.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
+ <preferred-size width="200" height="200" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JSplitPane" icon="/com/intellij/uiDesigner/icons/splitPane.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
+ <preferred-size width="200" height="200" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JSpinner" icon="/com/intellij/uiDesigner/icons/spinner.png" removable="false" auto-create-binding="true" can-attach-label="true">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
+ </item>
+ <item class="javax.swing.JSlider" icon="/com/intellij/uiDesigner/icons/slider.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
+ </item>
+ <item class="javax.swing.JSeparator" icon="/com/intellij/uiDesigner/icons/separator.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3" />
+ </item>
+ <item class="javax.swing.JProgressBar" icon="/com/intellij/uiDesigner/icons/progressbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1" />
+ </item>
+ <item class="javax.swing.JToolBar" icon="/com/intellij/uiDesigner/icons/toolbar.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1">
+ <preferred-size width="-1" height="20" />
+ </default-constraints>
+ </item>
+ <item class="javax.swing.JToolBar$Separator" icon="/com/intellij/uiDesigner/icons/toolbarSeparator.png" removable="false" auto-create-binding="false" can-attach-label="false">
+ <default-constraints vsize-policy="0" hsize-policy="0" anchor="0" fill="1" />
+ </item>
+ <item class="javax.swing.JScrollBar" icon="/com/intellij/uiDesigner/icons/scrollbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
+ <default-constraints vsize-policy="6" hsize-policy="0" anchor="0" fill="2" />
+ </item>
+ </group>
+ </component>
+ <component name="ProjectDetails">
+ <option name="projectName" value="scala-lang" />
+ </component>
+ <component name="ProjectDictionaryState">
+ <dictionary name="dragos" />
+ <dictionary name="odersky" />
+ </component>
+ <component name="ProjectKey">
+ <option name="state" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk/scala-lang.ipr" />
+ </component>
+ <component name="ProjectModuleManager">
+ <modules>
+ <module fileurl="file://$PROJECT_DIR$/actors.iml" filepath="$PROJECT_DIR$/actors.iml" />
+ <module fileurl="file://$PROJECT_DIR$/compiler.iml" filepath="$PROJECT_DIR$/compiler.iml" />
+ <module fileurl="file://$PROJECT_DIR$/dbc.iml" filepath="$PROJECT_DIR$/dbc.iml" />
+ <module fileurl="file://$PROJECT_DIR$/library.iml" filepath="$PROJECT_DIR$/library.iml" />
+ <module fileurl="file://$PROJECT_DIR$/manual.iml" filepath="$PROJECT_DIR$/manual.iml" />
+ <module fileurl="file://$PROJECT_DIR$/partest.iml" filepath="$PROJECT_DIR$/partest.iml" />
+ <module fileurl="file://$PROJECT_DIR$/scalap.iml" filepath="$PROJECT_DIR$/scalap.iml" />
+ <module fileurl="file://$PROJECT_DIR$/swing.iml" filepath="$PROJECT_DIR$/swing.iml" />
+ </modules>
+ </component>
+ <component name="ProjectRootManager" version="2" languageLevel="JDK_1_5" assert-keyword="true" jdk-15="true" project-jdk-name="1.6" project-jdk-type="JavaSDK">
+ <output url="file://$PROJECT_DIR$/../../out" />
+ </component>
+ <component name="ResourceManagerContainer">
+ <option name="myResourceBundles">
+ <value>
+ <list size="0" />
+ </value>
+ </option>
+ </component>
+ <component name="RmicSettings">
+ <option name="IS_EANABLED" value="false" />
+ <option name="DEBUGGING_INFO" value="true" />
+ <option name="GENERATE_NO_WARNINGS" value="false" />
+ <option name="GENERATE_IIOP_STUBS" value="false" />
+ <option name="ADDITIONAL_OPTIONS_STRING" value="" />
+ </component>
+ <component name="ScalaSettings">
+ <option name="MAXIMUM_HEAP_SIZE" value="1024" />
+ <option name="DEPRECATION" value="false" />
+ <option name="UNCHECKED" value="false" />
+ </component>
+ <component name="SvnBranchConfigurationManager">
+ <option name="myConfigurationMap">
+ <map>
+ <entry key="$PROJECT_DIR$/../..">
+ <value>
+ <SvnBranchConfiguration>
+ <option name="branchMap">
+ <map>
+ <entry key="https://lampsvn.epfl.ch/svn-repos/scala/scala/branches">
+ <value>
+ <list />
+ </value>
+ </entry>
+ <entry key="https://lampsvn.epfl.ch/svn-repos/scala/scala/tags">
+ <value>
+ <list />
+ </value>
+ </entry>
+ </map>
+ </option>
+ <option name="branchUrls">
+ <list>
+ <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/branches" />
+ <option value="https://lampsvn.epfl.ch/svn-repos/scala/scala/tags" />
+ </list>
+ </option>
+ <option name="trunkUrl" value="https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk" />
+ </SvnBranchConfiguration>
+ </value>
+ </entry>
+ </map>
+ </option>
+ <option name="myVersion" value="124" />
+ <option name="mySupportsUserInfoFilter" value="true" />
+ </component>
+ <component name="VcsDirectoryMappings">
+ <mapping directory="" vcs="svn" />
+ </component>
+ <component name="WebServicesPlugin" addRequiredLibraries="true" />
+ <component name="libraryTable">
+ <library name="Project ant library">
+ <CLASSES>
+ <root url="jar://$PROJECT_DIR$/../../lib/ant/ant-contrib.jar!/" />
+ <root url="jar://$PROJECT_DIR$/../../lib/ant/vizant.jar!/" />
+ <root url="jar://$PROJECT_DIR$/../../lib/ant/maven-ant-tasks-2.0.9.jar!/" />
+ <root url="jar://$PROJECT_DIR$/../../lib/ant/ant-dotnet-1.0.jar!/" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ </library>
+ <library name="Project Scala SDK">
+ <CLASSES>
+ <root url="jar://$PROJECT_DIR$/../../lib/jline.jar!/" />
+ <root url="jar://$PROJECT_DIR$/../../lib/fjbg.jar!/" />
+ <root url="jar://$PROJECT_DIR$/../../lib/ScalaCheck.jar!/" />
+ <root url="jar://$PROJECT_DIR$/../../lib/msil.jar!/" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ </library>
+ <library name="ant">
+ <CLASSES>
+ <root url="jar:///../share/ant/lib/ant.jar!/" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ </library>
+ <library name="locker">
+ <CLASSES>
+ <root url="file://$PROJECT_DIR$/../../build/locker/classes/library" />
+ <root url="file://$PROJECT_DIR$/../../build/locker/classes/compiler" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ </library>
+ </component>
+</project>
+
diff --git a/scalap.iml b/src/intellij/scalap.iml
index be028bff8f..48f98a0b1e 100644
--- a/scalap.iml
+++ b/src/intellij/scalap.iml
@@ -7,8 +7,8 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/src/scalap">
- <sourceFolder url="file://$MODULE_DIR$/src/scalap" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../scalap">
+ <sourceFolder url="file://$MODULE_DIR$/../scalap" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
diff --git a/swing.iml b/src/intellij/swing.iml
index 8ef3d159e7..c623fe0e72 100644
--- a/swing.iml
+++ b/src/intellij/swing.iml
@@ -7,8 +7,8 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/src/swing">
- <sourceFolder url="file://$MODULE_DIR$/src/swing" isTestSource="false" />
+ <content url="file://$MODULE_DIR$/../swing">
+ <sourceFolder url="file://$MODULE_DIR$/../swing" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
diff --git a/src/library/scala/Application.scala b/src/library/scala/Application.scala
index e9b97b5356..fdb122f5bf 100644
--- a/src/library/scala/Application.scala
+++ b/src/library/scala/Application.scala
@@ -11,7 +11,6 @@
package scala
-import java.lang.System.getProperty
import scala.compat.Platform.currentTime
/** <p>
@@ -84,7 +83,7 @@ trait Application {
* @param args the arguments passed to the main method
*/
def main(args: Array[String]) {
- if (getProperty("scala.time") ne null) {
+ if (util.Properties.propIsSet("scala.time")) {
val total = currentTime - executionStart
Console.println("[total " + total + "ms]")
}
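
The hunk above swaps the raw `System.getProperty(...) ne null` test for `util.Properties.propIsSet("scala.time")`. A minimal standalone sketch of the same idea, assuming only the standard `System` API (the `timingEnabled` helper and the demo object are illustrative, not part of the patch):

object TimingDemo {
  // Illustrative helper: true when the JVM was started with -Dscala.time.
  // Only presence is tested, since the property may be set to an empty string.
  def timingEnabled: Boolean = System.getProperty("scala.time") ne null

  def main(args: Array[String]) {
    val start = System.currentTimeMillis
    // ... program body ...
    if (timingEnabled)
      Console.println("[total " + (System.currentTimeMillis - start) + "ms]")
  }
}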
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index afaaed7c7c..f89e8b48a5 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -12,7 +12,7 @@
package scala
import scala.collection.generic._
-import scala.collection.mutable.{ArrayBuilder, GenericArray}
+import scala.collection.mutable.{ArrayBuilder, ArraySeq}
import compat.Platform.arraycopy
import scala.reflect.ClassManifest
import scala.runtime.ScalaRunTime.{array_apply, array_update}
@@ -24,15 +24,15 @@ class FallbackArrayBuilding {
/** A builder factory that generates a generic array.
* Called instead of Array.newBuilder if the element type of an array
- * does not have a class manifest. Note that fallbackBuilder fcatory
+ * does not have a class manifest. Note that fallbackBuilder factory
* needs an implicit parameter (otherwise it would not be dominated in implicit search
* by Array.canBuildFrom). We make sure that that implicit search is always
- * succesfull.
+ * successful.
*/
- implicit def fallbackCanBuildFrom[T](implicit m: DummyImplicit): CanBuildFrom[Array[_], T, GenericArray[T]] =
- new CanBuildFrom[Array[_], T, GenericArray[T]] {
- def apply(from: Array[_]) = GenericArray.newBuilder[T]
- def apply() = GenericArray.newBuilder[T]
+ implicit def fallbackCanBuildFrom[T](implicit m: DummyImplicit): CanBuildFrom[Array[_], T, ArraySeq[T]] =
+ new CanBuildFrom[Array[_], T, ArraySeq[T]] {
+ def apply(from: Array[_]) = ArraySeq.newBuilder[T]
+ def apply() = ArraySeq.newBuilder[T]
}
}
@@ -55,10 +55,13 @@ object Array extends FallbackArrayBuilding {
dest : AnyRef,
destPos : Int,
length : Int) {
- var i = 0
- while (i < length) {
- array_update(dest, i, array_apply(src, i))
+ var i = srcPos
+ var j = destPos
+ val srcUntil = srcPos + length
+ while (i < srcUntil) {
+ array_update(dest, j, array_apply(src, i))
i += 1
+ j += 1
}
}
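
The rewritten loop above fixes `slowcopy` so that it actually honours `srcPos` and `destPos` instead of always copying from index 0. A minimal sketch of the same index arithmetic over plain arrays (names are illustrative; the real method goes through `ScalaRunTime.array_apply`/`array_update` so it can handle arrays of statically unknown element type):

object SlowCopyDemo {
  // Copy `length` elements from src starting at srcPos into dest starting at destPos.
  def slowcopy[T](src: Array[T], srcPos: Int, dest: Array[T], destPos: Int, length: Int) {
    var i = srcPos
    var j = destPos
    val srcUntil = srcPos + length
    while (i < srcUntil) {
      dest(j) = src(i)
      i += 1
      j += 1
    }
  }

  def main(args: Array[String]) {
    val a = Array(1, 2, 3, 4, 5)
    val b = new Array[Int](5)
    slowcopy(a, 2, b, 0, 3)
    println(b.mkString(","))   // 3,4,5,0,0
  }
}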
diff --git a/src/library/scala/Console.scala b/src/library/scala/Console.scala
index 7923b6be65..fc33fa07ef 100644
--- a/src/library/scala/Console.scala
+++ b/src/library/scala/Console.scala
@@ -83,7 +83,7 @@ object Console {
/** Set the default output stream.
*
- * @param@ out the new output stream.
+ * @param out the new output stream.
*/
def setOut(out: OutputStream): Unit =
setOut(new PrintStream(out))
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index dfe48e3d00..3c8f5cf0bd 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -16,6 +16,15 @@ import scala.collection.mutable.{Builder, AddingBuilder, Map, HashMap}
import scala.collection.immutable.{Set, BitSet}
import scala.collection.generic.CanBuildFrom
+private object Enumeration {
+
+ /* This map is used to cache enumeration instances for
+ resolving enumeration _values_ to equal objects (by-reference)
+ when values are deserialized. */
+ private val emap: Map[Class[_], Enumeration] = new HashMap
+
+}
+
/** <p>
* Defines a finite set of values specific to the enumeration. Typically
* these values enumerate all possible forms something can take and provide a
@@ -56,11 +65,32 @@ import scala.collection.generic.CanBuildFrom
*/
@serializable
@SerialVersionUID(8476000850333817230L)
-abstract class Enumeration(initial: Int, names: String*) {
+abstract class Enumeration(initial: Int, names: String*) { thisenum =>
def this() = this(0, null)
def this(names: String*) = this(0, names: _*)
+ Enumeration.synchronized {
+ Enumeration.emap.get(getClass) match {
+ case None =>
+ Enumeration.emap += (getClass -> this)
+ case Some(_) =>
+ /* do nothing */
+ }
+ }
+
+ /* Note that `readResolve` cannot be private, since otherwise
+ the JVM does not invoke it when deserializing subclasses. */
+ protected def readResolve(): AnyRef = Enumeration.synchronized {
+ Enumeration.emap.get(getClass) match {
+ case None =>
+ Enumeration.emap += (getClass -> this)
+ this
+ case Some(existing) =>
+ existing
+ }
+ }
+
/** The name of this enumeration.
*/
override def toString = {
@@ -90,7 +120,7 @@ abstract class Enumeration(initial: Int, names: String*) {
*/
def values: ValueSet = {
if (!vsetDefined) {
- vset = new ValueSet(BitSet.empty ++ (vmap.valuesIterator map (_.id)))
+ vset = new ValueSet(BitSet.empty ++ (vmap.values map (_.id)))
vsetDefined = true
}
vset
@@ -164,34 +194,41 @@ abstract class Enumeration(initial: Int, names: String*) {
/* Obtains the name for the value with id `i`. If no name is cached
* in `nmap`, it populates `nmap` using reflection.
*/
- private def nameOf(i: Int): String = nmap.get(i) match {
- case Some(name) => name
- case None =>
- val methods = getClass.getMethods
- for (m <- methods
- if classOf[Value].isAssignableFrom(m.getReturnType) &&
- !java.lang.reflect.Modifier.isFinal(m.getModifiers)) {
- val name = m.getName
- // invoke method to obtain actual `Value` instance
- val value = m.invoke(this)
- // invoke `id` method
- val idMeth = classOf[Val].getMethod("id")
- val id: Int = idMeth.invoke(value).asInstanceOf[java.lang.Integer].intValue()
- nmap += (id -> name)
- }
- nmap(i)
+ private def nameOf(i: Int): String = synchronized {
+ def isValDef(m: java.lang.reflect.Method) =
+ getClass.getDeclaredFields.exists(fd => fd.getName == m.getName &&
+ fd.getType == m.getReturnType)
+ nmap.get(i) match {
+ case Some(name) => name
+ case None =>
+ val methods = getClass.getMethods
+ for (m <- methods
+ if (classOf[Value].isAssignableFrom(m.getReturnType) &&
+ !java.lang.reflect.Modifier.isFinal(m.getModifiers) &&
+ m.getParameterTypes.isEmpty &&
+ isValDef(m))) {
+ val name = m.getName
+ // invoke method to obtain actual `Value` instance
+ val value = m.invoke(this)
+ // invoke `id` method
+ val idMeth = classOf[Val].getMethod("id")
+ val id: Int = idMeth.invoke(value).asInstanceOf[java.lang.Integer].intValue()
+ nmap += (id -> name)
+ }
+ nmap(i)
+ }
}
/** The type of the enumerated values. */
@serializable
@SerialVersionUID(7091335633555234129L)
- abstract class Value extends Ordered[Enumeration#Value] {
+ abstract class Value extends Ordered[Value] {
/** the id and bit location of this enumeration value */
def id: Int
- override def compare(that: Enumeration#Value): Int = this.id - that.id
+ override def compare(that: Value): Int = this.id - that.id
override def equals(other: Any): Boolean =
other match {
- case that: Enumeration#Value => compare(that) == 0
+ case that: thisenum.Value => compare(that) == 0
case _ => false
}
override def hashCode: Int = id.hashCode
@@ -204,7 +241,7 @@ abstract class Enumeration(initial: Int, names: String*) {
if (id >= 32) throw new IllegalArgumentException
1 << id
}
- /** this enumeration value as an <code>Long</code> bit mask.
+ /** this enumeration value as a <code>Long</code> bit mask.
* @throws IllegalArgumentException if <code>id</code> is greater than 63
*/
@deprecated("mask64 will be removed")
@@ -216,7 +253,7 @@ abstract class Enumeration(initial: Int, names: String*) {
/** A class implementing the <a href="Enumeration.Value.html"
* target="contentFrame"><code>Value</code></a> type. This class can be
- * overriden to change the enumeration's naming and integer identification
+ * overridden to change the enumeration's naming and integer identification
* behaviour.
*/
@serializable
@@ -236,9 +273,16 @@ abstract class Enumeration(initial: Int, names: String*) {
override def toString() =
if (name eq null) Enumeration.this.nameOf(i)
else name
- private def readResolve(): AnyRef =
- if (vmap ne null) vmap(i)
+ protected def readResolve(): AnyRef = {
+ val enum = Enumeration.synchronized {
+ Enumeration.emap.get(Enumeration.this.getClass) match {
+ case None => Enumeration.this
+ case Some(existing) => existing
+ }
+ }
+ if (enum.vmap ne null) enum.vmap(i)
else this
+ }
}
/** A class for sets of values
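
The `readResolve` machinery above ensures that deserialized enumeration values resolve back to the canonical instances held by the cached enclosing `Enumeration`, so reference equality keeps working. A minimal round-trip sketch using plain Java serialization (the `Color` object and the demo are illustrative):

import java.io._

object Color extends Enumeration {
  val Red, Green, Blue = Value
}

object EnumSerializationDemo {
  def main(args: Array[String]) {
    val buf = new ByteArrayOutputStream
    val out = new ObjectOutputStream(buf)
    out.writeObject(Color.Red)
    out.close()

    val in = new ObjectInputStream(new ByteArrayInputStream(buf.toByteArray))
    val back = in.readObject()
    // With readResolve in place the value comes back as the same object,
    // so pattern matches and `eq` comparisons still hold.
    println(back eq Color.Red)
  }
}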
diff --git a/src/library/scala/Function.scala b/src/library/scala/Function.scala
index 0409d938fd..6ef137aa2b 100644
--- a/src/library/scala/Function.scala
+++ b/src/library/scala/Function.scala
@@ -93,10 +93,13 @@ object Function
/** Tupling for functions of arity 2. This transforms a function
* of arity 2 into a unary function that takes a pair of arguments.
*
+ * @note These functions are slotted for deprecation, but it is on
+ * hold pending superior type inference for tupling anonymous functions.
+ *
* @param f ...
* @return ...
*/
- @deprecated("Use `f.tupled` instead")
+ // @deprecated("Use `f.tupled` instead")
def tupled[a1, a2, b](f: (a1, a2) => b): Tuple2[a1, a2] => b = {
case Tuple2(x1, x2) => f(x1, x2)
}
@@ -104,7 +107,7 @@ object Function
/** Tupling for functions of arity 3. This transforms a function
* of arity 3 into a unary function that takes a triple of arguments.
*/
- @deprecated("Use `f.tupled` instead")
+ // @deprecated("Use `f.tupled` instead")
def tupled[a1, a2, a3, b](f: (a1, a2, a3) => b): Tuple3[a1, a2, a3] => b = {
case Tuple3(x1, x2, x3) => f(x1, x2, x3)
}
@@ -112,7 +115,7 @@ object Function
/** Tupling for functions of arity 4. This transforms a function
* of arity 4 into a unary function that takes a 4-tuple of arguments.
*/
- @deprecated("Use `f.tupled` instead")
+ // @deprecated("Use `f.tupled` instead")
def tupled[a1, a2, a3, a4, b](f: (a1, a2, a3, a4) => b): Tuple4[a1, a2, a3, a4] => b = {
case Tuple4(x1, x2, x3, x4) => f(x1, x2, x3, x4)
}
@@ -120,7 +123,7 @@ object Function
/** Tupling for functions of arity 5. This transforms a function
* of arity 5 into a unary function that takes a 5-tuple of arguments.
*/
- @deprecated("Use `f.tupled` instead")
+ // @deprecated("Use `f.tupled` instead")
def tupled[a1, a2, a3, a4, a5, b](f: (a1, a2, a3, a4, a5) => b): Tuple5[a1, a2, a3, a4, a5] => b = {
case Tuple5(x1, x2, x3, x4, x5) => f(x1, x2, x3, x4, x5)
}
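
`Function.tupled` turns an n-ary function into a unary function over a tuple; the hunk above merely comments out its deprecation while type inference for `f.tupled` is improved. A small usage sketch (names illustrative):

object TupledDemo {
  def main(args: Array[String]) {
    val add = (x: Int, y: Int) => x + y
    val addPair = Function.tupled(add)     // ((Int, Int)) => Int

    println(addPair((3, 4)))               // 7

    // Convenient when mapping over a collection of pairs.
    val pairs = List((1, 2), (3, 4))
    println(pairs map addPair)             // List(3, 7)
  }
}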
diff --git a/src/library/scala/Immutable.scala b/src/library/scala/Immutable.scala
index 3b6fe28d52..bc0a6100f6 100644
--- a/src/library/scala/Immutable.scala
+++ b/src/library/scala/Immutable.scala
@@ -11,7 +11,7 @@
package scala
-/** A marker trait for all immutable datastructures such as imutable
+/** A marker trait for all immutable datastructures such as immutable
* collections.
*
* @since 2.8
diff --git a/src/library/scala/LowPriorityImplicits.scala b/src/library/scala/LowPriorityImplicits.scala
index d5a3727f66..899dbe27d7 100644
--- a/src/library/scala/LowPriorityImplicits.scala
+++ b/src/library/scala/LowPriorityImplicits.scala
@@ -26,21 +26,21 @@ import collection.generic.CanBuildFrom
class LowPriorityImplicits {
implicit def genericWrapArray[T](xs: Array[T]): WrappedArray[T] =
- WrappedArray.make(xs)
+ if (xs ne null) WrappedArray.make(xs) else null
- implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = new WrappedArray.ofRef[T](xs)
- implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = new WrappedArray.ofInt(xs)
- implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = new WrappedArray.ofDouble(xs)
- implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = new WrappedArray.ofLong(xs)
- implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = new WrappedArray.ofFloat(xs)
- implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = new WrappedArray.ofChar(xs)
- implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = new WrappedArray.ofByte(xs)
- implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = new WrappedArray.ofShort(xs)
- implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = new WrappedArray.ofBoolean(xs)
- implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = new WrappedArray.ofUnit(xs)
+ implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = if (xs ne null) new WrappedArray.ofRef[T](xs) else null
+ implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = if (xs ne null) new WrappedArray.ofInt(xs) else null
+ implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = if (xs ne null) new WrappedArray.ofDouble(xs) else null
+ implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = if (xs ne null) new WrappedArray.ofLong(xs) else null
+ implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = if (xs ne null) new WrappedArray.ofFloat(xs) else null
+ implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = if (xs ne null) new WrappedArray.ofChar(xs) else null
+ implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = if (xs ne null) new WrappedArray.ofByte(xs) else null
+ implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = if (xs ne null) new WrappedArray.ofShort(xs) else null
+ implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = if (xs ne null) new WrappedArray.ofBoolean(xs) else null
+ implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = if (xs ne null) new WrappedArray.ofUnit(xs) else null
- implicit def wrapString(s: String): WrappedString = new WrappedString(s)
- implicit def unwrapString(ws: WrappedString): String = ws.self
+ implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null
+ implicit def unwrapString(ws: WrappedString): String = if (ws ne null) ws.self else null
implicit def fallbackStringCanBuildFrom[T]: CanBuildFrom[String, T, collection.immutable.IndexedSeq[T]] =
new CanBuildFrom[String, T, collection.immutable.IndexedSeq[T]] {
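
The conversions above now pass `null` straight through instead of wrapping it. A minimal sketch of the observable difference for strings, under the assumption that the implicit view is applied at the call site (the demo names are illustrative):

object NullWrapDemo {
  def describe(ws: collection.immutable.WrappedString): String =
    if (ws eq null) "null" else ws.mkString("-")

  def main(args: Array[String]) {
    val s: String = null
    // The null-preserving wrapString converts a null String to a null
    // WrappedString rather than constructing a wrapper around null.
    println(describe(s))        // null
    println(describe("abc"))    // a-b-c
  }
}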
diff --git a/src/library/scala/NotDefinedError.scala b/src/library/scala/NotDefinedError.scala
index c1939a4e9a..a47613fb9a 100644
--- a/src/library/scala/NotDefinedError.scala
+++ b/src/library/scala/NotDefinedError.scala
@@ -14,4 +14,5 @@ package scala
/**
* @since 2.0
*/
+@deprecated("Use a custom Error class instead")
final class NotDefinedError(msg: String) extends Error("not defined: " + msg)
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index 8511fa78a5..f2da220775 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -35,6 +35,7 @@ object Option
* @version 1.1, 16/01/2007
*/
sealed abstract class Option[+A] extends Product {
+ self =>
/** True if the option is the <code>None</code> value, false otherwise.
*/
@@ -45,7 +46,7 @@ sealed abstract class Option[+A] extends Product {
def isDefined: Boolean = !isEmpty
/** get the value of this option.
- * @requires that the option is nonEmpty.
+ * @note The option must be nonEmpty.
* @throws Predef.NoSuchElementException if the option is empty.
*/
def get: A
@@ -89,6 +90,22 @@ sealed abstract class Option[+A] extends Product {
def filter(p: A => Boolean): Option[A] =
if (isEmpty || p(this.get)) this else None
+ /** Necessary to keep Option from being implicitly converted to
+ * Iterable in for comprehensions.
+ */
+ def withFilter(p: A => Boolean): WithFilter = new WithFilter(p)
+
+ /** We need a whole WithFilter class to honor the "doesn't create a new
+ * collection" contract even though it seems unlikely to matter much in a
+ * collection with max size 1.
+ */
+ class WithFilter(p: A => Boolean) {
+ def map[B](f: A => B): Option[B] = self filter p map f
+ def flatMap[B](f: A => Option[B]): Option[B] = self filter p flatMap f
+ def foreach[U](f: A => U): Unit = self filter p foreach f
+ def withFilter(q: A => Boolean): WithFilter = new WithFilter(x => p(x) && q(x))
+ }
+
/** If the option is nonempty, p(value), otherwise false.
*
* @param p the predicate to test
@@ -110,7 +127,7 @@ sealed abstract class Option[+A] extends Product {
*
* @param pf the partial function.
*/
- def partialMap[B](pf: PartialFunction[A, B]): Option[B] =
+ def collect[B](pf: PartialFunction[A, B]): Option[B] =
if (!isEmpty && pf.isDefinedAt(this.get)) Some(pf(this.get)) else None
/** If the option is nonempty return it,
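
With `withFilter` in place, a guard in a for comprehension stays within `Option` instead of forcing the implicit conversion to `Iterable`, and the former `partialMap` is now spelled `collect`. A small usage sketch:

object OptionDemo {
  def main(args: Array[String]) {
    val opt: Option[Int] = Some(42)

    // The guard is routed through Option.withFilter.
    val doubledEven = for (x <- opt if x % 2 == 0) yield x * 2
    println(doubledEven)                                        // Some(84)

    // collect applies a partial function where it is defined.
    println(opt collect { case x if x > 10 => "big: " + x })    // Some(big: 42)
  }
}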
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 5684c91aaa..2037705bab 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -14,6 +14,8 @@ package scala
import collection.immutable.StringOps
import collection.mutable.ArrayOps
import collection.generic.CanBuildFrom
+import annotation.elidable
+import annotation.elidable.ASSERTION
/** The <code>Predef</code> object provides definitions that are
* accessible in all Scala compilation units without explicit
@@ -53,22 +55,6 @@ object Predef extends LowPriorityImplicits {
@inline def locally[T](x: T): T = x
- // hashcode -----------------------------------------------------------
-
- @inline def hash(x: Any): Int =
- if (x.isInstanceOf[Number]) runtime.BoxesRunTime.hashFromNumber(x.asInstanceOf[Number])
- else x.hashCode
-
- @inline def hash(x: Number): Int =
- runtime.BoxesRunTime.hashFromNumber(x)
-
- @inline def hash(x: java.lang.Long): Int = {
- val iv = x.intValue
- if (iv == x.longValue) iv else x.hashCode
- }
-
- @inline def hash(x: Int): Int = x
-
// errors and asserts -------------------------------------------------
def error(message: String): Nothing = throw new RuntimeException(message)
@@ -80,38 +66,82 @@ object Predef extends LowPriorityImplicits {
throw new Throwable()
}
- import annotation.elidable
- import annotation.elidable.ASSERTION
-
+ /** Tests an expression, throwing an AssertionError if false.
+ * Calls to this method will not be generated if -Xelide-below
+ * is at least ASSERTION.
+ *
+ * @see elidable
+ * @param p the expression to test
+ */
@elidable(ASSERTION)
def assert(assertion: Boolean) {
if (!assertion)
throw new java.lang.AssertionError("assertion failed")
}
+ /** Tests an expression, throwing an AssertionError if false.
+ * Calls to this method will not be generated if -Xelide-below
+ * is at least ASSERTION.
+ *
+ * @see elidable
+ * @param p the expression to test
+ * @param msg a String to include in the failure message
+ */
@elidable(ASSERTION)
def assert(assertion: Boolean, message: => Any) {
if (!assertion)
throw new java.lang.AssertionError("assertion failed: "+ message)
}
+ /** Tests an expression, throwing an AssertionError if false.
+ * This method differs from assert only in the intent expressed:
+ * assert contains a predicate which needs to be proven, while
+ * assume contains an axiom for a static checker. Calls to this method
+ * will not be generated if -Xelide-below is at least ASSERTION.
+ *
+ * @see elidable
+ * @param p the expression to test
+ */
@elidable(ASSERTION)
def assume(assumption: Boolean) {
if (!assumption)
throw new java.lang.AssertionError("assumption failed")
}
+ /** Tests an expression, throwing an AssertionError if false.
+ * This method differs from assert only in the intent expressed:
+ * assert contains a predicate which needs to be proven, while
+ * assume contains an axiom for a static checker. Calls to this method
+ * will not be generated if -Xelide-below is at least ASSERTION.
+ *
+ * @see elidable
+ * @param p the expression to test
+ * @param msg a String to include in the failure message
+ */
@elidable(ASSERTION)
def assume(assumption: Boolean, message: => Any) {
if (!assumption)
throw new java.lang.AssertionError("assumption failed: "+ message)
}
+ /** Tests an expression, throwing an IllegalArgumentException if false.
+ * This method is similar to assert, but blames the caller of the method
+ * for violating the condition.
+ *
+ * @param p the expression to test
+ */
def require(requirement: Boolean) {
if (!requirement)
throw new IllegalArgumentException("requirement failed")
}
+ /** Tests an expression, throwing an IllegalArgumentException if false.
+ * This method is similar to assert, but blames the caller of the method
+ * for violating the condition.
+ *
+ * @param p the expression to test
+ * @param msg a String to include in the failure message
+ */
def require(requirement: Boolean, message: => Any) {
if (!requirement)
throw new IllegalArgumentException("requirement failed: "+ message)
@@ -150,7 +180,7 @@ object Predef extends LowPriorityImplicits {
def print(x: Any) = Console.print(x)
def println() = Console.println()
def println(x: Any) = Console.println(x)
- def printf(text: String, xs: Any*) = Console.printf(format(text, xs: _*))
+ def printf(text: String, xs: Any*) = Console.print(format(text, xs: _*))
def format(text: String, xs: Any*) = augmentString(text).format(xs: _*)
def readLine(): String = Console.readLine()
@@ -291,7 +321,7 @@ object Predef extends LowPriorityImplicits {
implicit def conformsOrViewsAs[A <% B, B]: A <%< B = new (A <%< B) {def apply(x: A) = x}
}
- /** A type for which there is aways an implicit value.
+ /** A type for which there is always an implicit value.
* @see fallbackCanBuildFrom in Array.scala
*/
class DummyImplicit
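
The new documentation above distinguishes `assert`/`assume`, which are elidable and express internal invariants, from `require`, which always runs and blames the caller. A small usage sketch (the `sqrt` wrapper is illustrative):

object Checks {
  def sqrt(x: Double): Double = {
    require(x >= 0, "negative input: " + x)    // caller error; never elided
    val r = math.sqrt(x)
    assert(r * r <= x + 1e-9, "bad result")    // internal invariant; can be elided via -Xelide-below
    r
  }

  def main(args: Array[String]) {
    println(sqrt(9.0))
    // sqrt(-1.0) would throw IllegalArgumentException: requirement failed: negative input: -1.0
  }
}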
diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala
index 8521cf2437..a0503cfe4c 100644
--- a/src/library/scala/Product.scala
+++ b/src/library/scala/Product.scala
@@ -20,7 +20,7 @@ package scala
*/
trait Product extends Equals {
- /** for a product <code>A(x_1,...,x_k)</code>, returns <code>x_(n+1)</code>
+ /** For a product <code>A(x_1,...,x_k)</code>, returns <code>x_(n+1)</code>
* for <code>0 &lt;= n &lt; k</code>
*
* @param n the index of the element to return
@@ -29,6 +29,21 @@ trait Product extends Equals {
*/
def productElement(n: Int): Any
+ // !!! This will be disabled pending reimplementation, but it can't be removed
+ // until starr forgets about it.
+
+ /** Returns the name of the field at the given index from the definition
+ * of the class.
+ *
+ * @param n the index of the element name to return
+ * @throws NoSuchElementException if the name data is unavailable for any reason
+ * @throws IndexOutOfBoundsException if the index is out of range
+ * @return a String representing the field name
+ */
+ def productElementName(n: Int): String =
+ // the method implementation is synthetic - if it is not generated we always throw.
+ throw new NoSuchElementException()
+
/** return k for a product <code>A(x_1,...,x_k)</code>
*/
def productArity: Int
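
`Product` gives positional access to the components of case classes and tuples; as the comment above notes, `productElementName` is only a placeholder until the compiler synthesizes it. A small sketch of the stable parts:

object ProductDemo {
  case class Point(x: Int, y: Int)

  def main(args: Array[String]) {
    val p = Point(3, 4)
    println(p.productArity)         // 2
    println(p.productElement(0))    // 3
    println(p.productElement(1))    // 4
    // p.productElementName(0) may throw NoSuchElementException unless the
    // synthetic implementation is generated, as explained above.
  }
}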
diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala
index 2a4797ab5a..8c4e5973c5 100644
--- a/src/library/scala/Tuple2.scala
+++ b/src/library/scala/Tuple2.scala
@@ -42,6 +42,11 @@ case class Tuple2[+T1, +T2](_1:T1,_2:T2)
b1.result
}
+ /** Wraps a tuple in a `Zipped`, which supports 2-ary generalisations of map, flatMap, filter,...
+ *
+ * @see Zipped
+ * $willNotTerminateInf
+ */
def zipped[Repr1, El1, Repr2, El2](implicit w1: T1 => TraversableLike[El1, Repr1], w2: T2 => IterableLike[El2, Repr2]): Zipped[Repr1, El1, Repr2, El2]
= new Zipped[Repr1, El1, Repr2, El2](_1, _2)
diff --git a/src/library/scala/Tuple3.scala b/src/library/scala/Tuple3.scala
index b70310db3f..a1fca95e4d 100644
--- a/src/library/scala/Tuple3.scala
+++ b/src/library/scala/Tuple3.scala
@@ -41,6 +41,11 @@ case class Tuple3[+T1, +T2, +T3](_1:T1,_2:T2,_3:T3)
b1.result
}
+ /** Wraps a tuple in a `Zipped`, which supports 3-ary generalisations of map, flatMap, filter,...
+ *
+ * @see Zipped
+ * $willNotTerminateInf
+ */
def zipped[Repr1, El1, Repr2, El2, Repr3, El3](implicit w1: T1 => TraversableLike[El1, Repr1],
w2: T2 => IterableLike[El2, Repr2],
w3: T3 => IterableLike[El3, Repr3]): Zipped[Repr1, El1, Repr2, El2, Repr3, El3]
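
`zipped` applies binary and ternary operations to collections in lockstep without first materializing the list of tuples that `zip` would build. A small usage sketch:

object ZippedDemo {
  def main(args: Array[String]) {
    val xs = List(1, 2, 3)
    val ys = List(10, 20, 30)
    val zs = List(100, 200, 300)

    // Pairwise sum without building (xs zip ys) first.
    println((xs, ys).zipped map (_ + _))           // List(11, 22, 33)

    // Three-way combination via Tuple3.zipped.
    println((xs, ys, zs).zipped map (_ + _ + _))   // List(111, 222, 333)
  }
}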
diff --git a/src/library/scala/annotation/elidable.scala b/src/library/scala/annotation/elidable.scala
index 4f29c8f2ab..c75299e9fd 100644
--- a/src/library/scala/annotation/elidable.scala
+++ b/src/library/scala/annotation/elidable.scala
@@ -13,18 +13,18 @@ import java.util.logging.Level
/** An annotation for methods for which invocations might
* be removed in the generated code.
*
- * Behavior is influenced by passing -Xelide-level <arg>
+ * Behavior is influenced by passing -Xelide-below <arg>
* to scalac. Methods marked elidable will be omitted from
* generated code if the priority given the annotation is lower
* than to the command line argument. Examples:
- *
+ * {{{
* import annotation.elidable._
*
* @elidable(WARNING) def foo = log("foo")
* @elidable(FINE) def bar = log("bar")
*
- * scalac -Xelide-methods-below=1000
- *
+ * scalac -Xelide-below=1000
+ * }}}
* @since 2.8
*/
final class elidable(final val level: Int) extends StaticAnnotation {}
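
The corrected documentation now names the flag `-Xelide-below`. A small sketch of annotating a logging helper, following the example in the doc comment above (the `Log` object is illustrative):

import scala.annotation.elidable
import scala.annotation.elidable._

object Log {
  // Calls to debug are dropped entirely when the code is compiled with an
  // -Xelide-below threshold above the FINE level.
  @elidable(FINE) def debug(msg: String) { println("[debug] " + msg) }

  def main(args: Array[String]) {
    debug("this call may be elided at compile time")
  }
}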
diff --git a/src/library/scala/annotation/migration.scala b/src/library/scala/annotation/migration.scala
new file mode 100644
index 0000000000..b0915cde34
--- /dev/null
+++ b/src/library/scala/annotation/migration.scala
@@ -0,0 +1,28 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.annotation
+
+/**
+ * An annotation that marks a member as having changed semantics
+ * between versions. This is intended for methods which for one
+ * reason or another retain the same name and type signature,
+ * but some aspect of their behavior is different. An illustrative
+ * example is Stack.iterator, which reversed from LIFO to FIFO
+ * order between scala 2.7 and 2.8.
+ *
+ * The version numbers are to mark the scala major/minor release
+ * version where the change took place.
+ *
+ * @since 2.8
+ */
+private[scala] final class migration(
+ majorVersion: Int,
+ minorVersion: Int,
+ message: String)
+extends StaticAnnotation {}
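
Since `migration` is `private[scala]`, only library code inside the `scala` package can apply it. A hypothetical use, modelled on the Stack.iterator example from the comment above (class and message are illustrative):

package scala.collection.mutable

import scala.annotation.migration

class ExampleStack[A] {
  @migration(2, 8,
    "Prior to 2.8, iterator returned elements in LIFO order; it now returns them in FIFO order.")
  def iterator: Iterator[A] = Iterator.empty   // placeholder body, for illustration only
}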
diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala
index aac731fec9..8476ede7b5 100644
--- a/src/library/scala/collection/BitSetLike.scala
+++ b/src/library/scala/collection/BitSetLike.scala
@@ -28,11 +28,10 @@ import mutable.StringBuilder
* @since 2.8
* @define coll bitset
* @define Coll BitSet
- * define bitsetinfo
+ * @define bitsetinfo
* Bitsets are sets of non-negative integers which are represented as
* variable-size arrays of bits packed into 64-bit words. The size of a bitset is
* determined by the largest number stored in it.
-
*/
trait BitSetLike[+This <: BitSetLike[This] with Set[Int]] extends SetLike[Int, This] { self =>
@@ -42,7 +41,7 @@ trait BitSetLike[+This <: BitSetLike[This] with Set[Int]] extends SetLike[Int, T
protected def nwords: Int
/** The word at index `idx', or 0L if outside the range of the set
- * @pre idx >= 0
+ * @note Requires `idx >= 0`
*/
protected def word(idx: Int): Long
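The `nwords`/`word` pair is all a bitset needs for membership: element `n` lives in word `n >> 6` at bit `n & 63`. A hedged standalone sketch of that arithmetic (the `contains` helper below is hypothetical, not library code):

    // membership test over a packed Array[Long], mirroring the word(idx) contract
    def contains(words: Array[Long], n: Int): Boolean = {
      require(n >= 0)
      val idx = n >> 6                                    // which 64-bit word
      idx < words.length && (words(idx) & (1L << (n & 63))) != 0L
    }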
diff --git a/src/library/scala/collection/IndexedSeq.scala b/src/library/scala/collection/IndexedSeq.scala
index 05141fb864..50a66e924c 100644
--- a/src/library/scala/collection/IndexedSeq.scala
+++ b/src/library/scala/collection/IndexedSeq.scala
@@ -14,15 +14,9 @@ package scala.collection
import generic._
import mutable.Builder
-/** <p>
- * Sequences that support O(1) element access and O(1) length computation.
- * </p>
- * <p>
- * This class does not add any methods to <code>Sequence</code> but
- * overrides several methods with optimized implementations.
- * </p>
+/** A base trait for indexed sequences.
+ * $indexedSeqInfo
*
- * @author Sean McDirmid
* @author Martin Odersky
* @version 2.8
* @since 2.8
diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala
index 8164075629..ea6e1bb493 100644
--- a/src/library/scala/collection/IndexedSeqLike.scala
+++ b/src/library/scala/collection/IndexedSeqLike.scala
@@ -18,16 +18,23 @@ import scala.annotation.tailrec
/** A template trait for indexed sequences of type `IndexedSeq[A]`.
*
* $indexedSeqInfo
+ *
+ * This trait just implements `iterator` in terms of `apply` and `length`.
+ * However, see `IndexedSeqOptimized` for an implementation trait that overrides operations
+ * to make them run faster under the assumption of fast random access with `apply`.
+ *
* @author Sean McDirmid
* @author Martin Odersky
* @version 2.8
* @since 2.8
+ * @define Coll IndexedSeq
* @define indexedSeqInfo
* Indexed sequences support constant-time or near constant-time element
- * access and length computation.
+ * access and length computation. They are defined in terms of abstract methods
+ * `apply` for indexing and `length`.
*
- * Indexed sequences do not define any new methods wrt `Seq`. However, some `Seq` methods
- * are overridden with optimized implementations.
+ * Indexed sequences do not add any new methods wrt `Seq`, but promise
+ * efficient implementations of random access patterns.
*
* @tparam A the element type of the $coll
* @tparam Repr the type of the actual $coll containing the elements.
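Implementing `iterator` in terms of `apply` and `length` amounts to walking indices; a hedged standalone sketch (the `elementsOf` helper is hypothetical):

    def elementsOf[A](xs: IndexedSeq[A]): Iterator[A] = new Iterator[A] {
      private var i = 0
      def hasNext: Boolean = i < xs.length
      def next(): A = { val a = xs(i); i += 1; a }        // one O(1) indexed access per element
    }

    elementsOf(Vector(1, 2, 3)).toList                    // List(1, 2, 3)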
@@ -76,267 +83,7 @@ trait IndexedSeqLike[+A, +Repr] extends SeqLike[A, Repr] { self =>
override /*IterableLike*/
def iterator: Iterator[A] = new Elements(0, length)
-
- override /*IterableLike*/
- def isEmpty: Boolean = { length == 0 }
-
- override /*IterableLike*/
- def foreach[U](f: A => U): Unit = {
- var i = 0
- val len = length
- while (i < len) { f(this(i)); i += 1 }
- }
-
- override /*IterableLike*/
- def forall(p: A => Boolean): Boolean = prefixLength(p(_)) == length
-
- override /*IterableLike*/
- def exists(p: A => Boolean): Boolean = prefixLength(!p(_)) != length
-
- override /*IterableLike*/
- def find(p: A => Boolean): Option[A] = {
- val i = prefixLength(!p(_))
- if (i < length) Some(this(i)) else None
- }
/*
- override /*IterableLike*/
- def mapFind[B](f: A => Option[B]): Option[B] = {
- var i = 0
- var res: Option[B] = None
- val len = length
- while (res.isEmpty && i < len) {
- res = f(this(i))
- i += 1
- }
- res
- }
-*/
- @tailrec
- private def foldl[B](start: Int, end: Int, z: B, op: (B, A) => B): B =
- if (start == end) z
- else foldl(start + 1, end, op(z, this(start)), op)
-
- @tailrec
- private def foldr[B](start: Int, end: Int, z: B, op: (A, B) => B): B =
- if (start == end) z
- else foldr(start, end - 1, op(this(end - 1), z), op)
-
- override /*TraversableLike*/
- def foldLeft[B](z: B)(op: (B, A) => B): B =
- foldl(0, length, z, op)
-
- override /*IterableLike*/
- def foldRight[B](z: B)(op: (A, B) => B): B =
- foldr(0, length, z, op)
-
- override /*TraversableLike*/
- def reduceLeft[B >: A](op: (B, A) => B): B =
- if (length > 0) foldl(1, length, this(0), op) else super.reduceLeft(op)
-
- override /*IterableLike*/
- def reduceRight[B >: A](op: (A, B) => B): B =
- if (length > 0) foldr(0, length - 1, this(length - 1), op) else super.reduceRight(op)
-
- override /*IterableLike*/
- def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = that match {
- case that: IndexedSeq[_] =>
- val b = bf(repr)
- var i = 0
- val len = this.length min that.length
- b.sizeHint(len)
- while (i < len) {
- b += ((this(i), that(i).asInstanceOf[B]))
- i += 1
- }
- b.result
- case _ =>
- super.zip[A1, B, That](that)(bf)
- }
-
- override /*IterableLike*/
- def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = {
- val b = bf(repr)
- val len = length
- b.sizeHint(len)
- var i = 0
- while (i < len) {
- b += ((this(i), i))
- i += 1
- }
- b.result
- }
-
- override /*IterableLike*/
- def slice(from: Int, until: Int): Repr = {
- var i = from max 0
- val end = until min length
- val b = newBuilder
- b.sizeHint(end - i)
- while (i < end) {
- b += this(i)
- i += 1
- }
- b.result
- }
-
- override /*IterableLike*/
- def head: A = if (isEmpty) super.head else this(0)
-
- override /*TraversableLike*/
- def tail: Repr = if (isEmpty) super.tail else slice(1, length)
-
- override /*TraversableLike*/
- def last: A = if (length > 0) this(length - 1) else super.last
-
- override /*IterableLike*/
- def init: Repr = if (length > 0) slice(0, length - 1) else super.init
-
- override /*TraversableLike*/
- def take(n: Int): Repr = slice(0, n)
-
- override /*TraversableLike*/
- def drop(n: Int): Repr = slice(n, length)
-
- override /*IterableLike*/
- def takeRight(n: Int): Repr = slice(length - n, length)
-
- override /*IterableLike*/
- def dropRight(n: Int): Repr = slice(0, length - n)
-
- override /*TraversableLike*/
- def splitAt(n: Int): (Repr, Repr) = (take(n), drop(n))
-
- override /*IterableLike*/
- def takeWhile(p: A => Boolean): Repr = take(prefixLength(p))
-
- override /*TraversableLike*/
- def dropWhile(p: A => Boolean): Repr = drop(prefixLength(p))
-
- override /*TraversableLike*/
- def span(p: A => Boolean): (Repr, Repr) = splitAt(prefixLength(p))
-
- override /*IterableLike*/
- def sameElements[B >: A](that: Iterable[B]): Boolean = that match {
- case that: IndexedSeq[_] =>
- val len = length
- len == that.length && {
- var i = 0
- while (i < len && this(i) == that(i)) i += 1
- i == len
- }
- case _ =>
- super.sameElements(that)
- }
-
- override /*IterableLike*/
- def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
- var i = 0
- var j = start
- val end = length min len min (xs.length - start)
- while (i < end) {
- xs(j) = this(i)
- i += 1
- j += 1
- }
- }
-
-
- // Overridden methods from Seq
-
- override /*SeqLike*/
- def lengthCompare(len: Int): Int = length - len
-
- override /*SeqLike*/
- def segmentLength(p: A => Boolean, from: Int): Int = {
- val start = from
- val len = length
- var i = start
- while (i < len && p(this(i))) i += 1
- i - start
- }
-
- private def negLength(n: Int) = if (n == length) -1 else n
-
- override /*SeqLike*/
- def indexWhere(p: A => Boolean, from: Int): Int = {
- val start = from max 0
- negLength(start + segmentLength(!p(_), start))
- }
-
- override /*SeqLike*/
- def lastIndexWhere(p: A => Boolean, end: Int): Int = {
- var i = end
- while (i >= 0 && !p(this(i))) i -= 1
- i
- }
-
- override /*SeqLike*/
- def reverse: Repr = {
- val b = newBuilder
- b.sizeHint(length)
- var i = length
- while (0 < i) {
- i -= 1
- b += this(i)
- }
- b.result
- }
-
- override /*SeqLike*/
- def reverseIterator: Iterator[A] = new Iterator[A] {
- private var i = self.length
- def hasNext: Boolean = 0 < i
- def next: A =
- if (0 < i) {
- i -= 1
- self(i)
- } else Iterator.empty.next
- }
-
- override /*SeqLike*/
- def startsWith[B](that: Seq[B], offset: Int): Boolean = that match {
- case that: IndexedSeq[_] =>
- var i = offset
- var j = 0
- val thisLen = length
- val thatLen = that.length
- while (i < thisLen && j < thatLen && this(i) == that(j)) {
- i += 1
- j += 1
- }
- j == thatLen
- case _ =>
- var i = offset
- val thisLen = length
- val thatElems = that.iterator
- while (i < thisLen && thatElems.hasNext) {
- if (this(i) != thatElems.next())
- return false
-
- i += 1
- }
- !thatElems.hasNext
- }
-
- override /*SeqLike*/
- def endsWith[B](that: Seq[B]): Boolean = that match {
- case that: IndexedSeq[_] =>
- var i = length - 1
- var j = that.length - 1
-
- (j <= i) && {
- while (j >= 0) {
- if (this(i) != that(j))
- return false
- i -= 1
- j -= 1
- }
- true
- }
- case _ =>
- super.endsWith(that)
- }
-
override /*SeqLike*/
def view = new IndexedSeqView[A, Repr] {
protected lazy val underlying = self.repr
@@ -347,5 +94,6 @@ trait IndexedSeqLike[+A, +Repr] extends SeqLike[A, Repr] { self =>
override /*SeqLike*/
def view(from: Int, until: Int) = view.slice(from, until)
+*/
}
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
new file mode 100755
index 0000000000..12b39c8b83
--- /dev/null
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -0,0 +1,293 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+// $Id: IndexedSeqLike.scala 20129 2009-12-14 17:12:17Z odersky $
+
+
+package scala.collection
+
+import generic._
+import mutable.ArrayBuffer
+import scala.annotation.tailrec
+
+/** A template trait for indexed sequences of type `IndexedSeq[A]` which optimizes
+ * the implementation of several methods under the assumption of fast random access.
+ *
+ * $indexedSeqInfo
+ * @author Martin Odersky
+ * @version 2.8
+ * @since 2.8
+ *
+ * @tparam A the element type of the $coll
+ * @tparam Repr the type of the actual $coll containing the elements.
+ * @define willNotTerminateInf
+ * @define mayNotTerminateInf
+ */
+trait IndexedSeqOptimized[+A, +Repr] extends IndexedSeqLike[A, Repr] { self =>
+
+ override /*IterableLike*/
+ def isEmpty: Boolean = { length == 0 }
+
+ override /*IterableLike*/
+ def foreach[U](f: A => U): Unit = {
+ var i = 0
+ val len = length
+ while (i < len) { f(this(i)); i += 1 }
+ }
+
+ override /*IterableLike*/
+ def forall(p: A => Boolean): Boolean = prefixLength(p(_)) == length
+
+ override /*IterableLike*/
+ def exists(p: A => Boolean): Boolean = prefixLength(!p(_)) != length
+
+ override /*IterableLike*/
+ def find(p: A => Boolean): Option[A] = {
+ val i = prefixLength(!p(_))
+ if (i < length) Some(this(i)) else None
+ }
+/*
+ override /*IterableLike*/
+ def mapFind[B](f: A => Option[B]): Option[B] = {
+ var i = 0
+ var res: Option[B] = None
+ val len = length
+ while (res.isEmpty && i < len) {
+ res = f(this(i))
+ i += 1
+ }
+ res
+ }
+*/
+ @tailrec
+ private def foldl[B](start: Int, end: Int, z: B, op: (B, A) => B): B =
+ if (start == end) z
+ else foldl(start + 1, end, op(z, this(start)), op)
+
+ @tailrec
+ private def foldr[B](start: Int, end: Int, z: B, op: (A, B) => B): B =
+ if (start == end) z
+ else foldr(start, end - 1, op(this(end - 1), z), op)
+
+ override /*TraversableLike*/
+ def foldLeft[B](z: B)(op: (B, A) => B): B =
+ foldl(0, length, z, op)
+
+ override /*IterableLike*/
+ def foldRight[B](z: B)(op: (A, B) => B): B =
+ foldr(0, length, z, op)
+
+ override /*TraversableLike*/
+ def reduceLeft[B >: A](op: (B, A) => B): B =
+ if (length > 0) foldl(1, length, this(0), op) else super.reduceLeft(op)
+
+ override /*IterableLike*/
+ def reduceRight[B >: A](op: (A, B) => B): B =
+ if (length > 0) foldr(0, length - 1, this(length - 1), op) else super.reduceRight(op)
+
+ override /*IterableLike*/
+ def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = that match {
+ case that: IndexedSeq[_] =>
+ val b = bf(repr)
+ var i = 0
+ val len = this.length min that.length
+ b.sizeHint(len)
+ while (i < len) {
+ b += ((this(i), that(i).asInstanceOf[B]))
+ i += 1
+ }
+ b.result
+ case _ =>
+ super.zip[A1, B, That](that)(bf)
+ }
+
+ override /*IterableLike*/
+ def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = {
+ val b = bf(repr)
+ val len = length
+ b.sizeHint(len)
+ var i = 0
+ while (i < len) {
+ b += ((this(i), i))
+ i += 1
+ }
+ b.result
+ }
+
+ override /*IterableLike*/
+ def slice(from: Int, until: Int): Repr = {
+ var i = from max 0
+ val end = until min length
+ val b = newBuilder
+ b.sizeHint(end - i)
+ while (i < end) {
+ b += this(i)
+ i += 1
+ }
+ b.result
+ }
+
+ override /*IterableLike*/
+ def head: A = if (isEmpty) super.head else this(0)
+
+ override /*TraversableLike*/
+ def tail: Repr = if (isEmpty) super.tail else slice(1, length)
+
+ override /*TraversableLike*/
+ def last: A = if (length > 0) this(length - 1) else super.last
+
+ override /*IterableLike*/
+ def init: Repr = if (length > 0) slice(0, length - 1) else super.init
+
+ override /*TraversableLike*/
+ def take(n: Int): Repr = slice(0, n)
+
+ override /*TraversableLike*/
+ def drop(n: Int): Repr = slice(n, length)
+
+ override /*IterableLike*/
+ def takeRight(n: Int): Repr = slice(length - n, length)
+
+ override /*IterableLike*/
+ def dropRight(n: Int): Repr = slice(0, length - n)
+
+ override /*TraversableLike*/
+ def splitAt(n: Int): (Repr, Repr) = (take(n), drop(n))
+
+ override /*IterableLike*/
+ def takeWhile(p: A => Boolean): Repr = take(prefixLength(p))
+
+ override /*TraversableLike*/
+ def dropWhile(p: A => Boolean): Repr = drop(prefixLength(p))
+
+ override /*TraversableLike*/
+ def span(p: A => Boolean): (Repr, Repr) = splitAt(prefixLength(p))
+
+ override /*IterableLike*/
+ def sameElements[B >: A](that: Iterable[B]): Boolean = that match {
+ case that: IndexedSeq[_] =>
+ val len = length
+ len == that.length && {
+ var i = 0
+ while (i < len && this(i) == that(i)) i += 1
+ i == len
+ }
+ case _ =>
+ super.sameElements(that)
+ }
+
+ override /*IterableLike*/
+ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
+ var i = 0
+ var j = start
+ val end = length min len min (xs.length - start)
+ while (i < end) {
+ xs(j) = this(i)
+ i += 1
+ j += 1
+ }
+ }
+
+
+ // Overridden methods from Seq
+
+ override /*SeqLike*/
+ def lengthCompare(len: Int): Int = length - len
+
+ override /*SeqLike*/
+ def segmentLength(p: A => Boolean, from: Int): Int = {
+ val start = from
+ val len = length
+ var i = start
+ while (i < len && p(this(i))) i += 1
+ i - start
+ }
+
+ private def negLength(n: Int) = if (n == length) -1 else n
+
+ override /*SeqLike*/
+ def indexWhere(p: A => Boolean, from: Int): Int = {
+ val start = from max 0
+ negLength(start + segmentLength(!p(_), start))
+ }
+
+ override /*SeqLike*/
+ def lastIndexWhere(p: A => Boolean, end: Int): Int = {
+ var i = end
+ while (i >= 0 && !p(this(i))) i -= 1
+ i
+ }
+
+ override /*SeqLike*/
+ def reverse: Repr = {
+ val b = newBuilder
+ b.sizeHint(length)
+ var i = length
+ while (0 < i) {
+ i -= 1
+ b += this(i)
+ }
+ b.result
+ }
+
+ override /*SeqLike*/
+ def reverseIterator: Iterator[A] = new Iterator[A] {
+ private var i = self.length
+ def hasNext: Boolean = 0 < i
+ def next: A =
+ if (0 < i) {
+ i -= 1
+ self(i)
+ } else Iterator.empty.next
+ }
+
+ override /*SeqLike*/
+ def startsWith[B](that: Seq[B], offset: Int): Boolean = that match {
+ case that: IndexedSeq[_] =>
+ var i = offset
+ var j = 0
+ val thisLen = length
+ val thatLen = that.length
+ while (i < thisLen && j < thatLen && this(i) == that(j)) {
+ i += 1
+ j += 1
+ }
+ j == thatLen
+ case _ =>
+ var i = offset
+ val thisLen = length
+ val thatElems = that.iterator
+ while (i < thisLen && thatElems.hasNext) {
+ if (this(i) != thatElems.next())
+ return false
+
+ i += 1
+ }
+ !thatElems.hasNext
+ }
+
+ override /*SeqLike*/
+ def endsWith[B](that: Seq[B]): Boolean = that match {
+ case that: IndexedSeq[_] =>
+ var i = length - 1
+ var j = that.length - 1
+
+ (j <= i) && {
+ while (j >= 0) {
+ if (this(i) != that(j))
+ return false
+ i -= 1
+ j -= 1
+ }
+ true
+ }
+ case _ =>
+ super.endsWith(that)
+ }
+}
+
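The tail-recursive `foldl`/`foldr` helpers above are what make the optimized folds allocation-free: they walk indices instead of creating an iterator. A hedged standalone restatement of the same idea (object and method names are hypothetical):

    import scala.annotation.tailrec

    object IndexedFolds {
      @tailrec
      def foldl[A, B](xs: IndexedSeq[A], start: Int, z: B)(op: (B, A) => B): B =
        if (start == xs.length) z
        else foldl(xs, start + 1, op(z, xs(start)))(op)
    }

    IndexedFolds.foldl(Vector(1, 2, 3, 4), 0, 0)(_ + _)   // 10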
diff --git a/src/library/scala/collection/IndexedSeqView.scala b/src/library/scala/collection/IndexedSeqView.scala
deleted file mode 100644
index 72f3374e94..0000000000
--- a/src/library/scala/collection/IndexedSeqView.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.collection
-
-import TraversableView.NoBuilder
-import generic._
-
-/** A non-strict projection of an iterable.
- *
- * @author Sean McDirmid
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
- */
-trait IndexedSeqView[+A, +Coll] extends IndexedSeqViewLike[A, Coll, IndexedSeqView[A, Coll]]
-
-object IndexedSeqView {
- type Coll = TraversableView[_, C] forSome {type C <: Traversable[_]}
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeqView[A, IndexedSeq[_]]] =
- new CanBuildFrom[Coll, A, IndexedSeqView[A, IndexedSeq[_]]] {
- def apply(from: Coll) = new NoBuilder
- def apply() = new NoBuilder
- }
- implicit def arrCanBuildFrom[A]: CanBuildFrom[TraversableView[_, Array[_]], A, IndexedSeqView[A, Array[A]]] =
- new CanBuildFrom[TraversableView[_, Array[_]], A, IndexedSeqView[A, Array[A]]] {
- def apply(from: TraversableView[_, Array[_]]) = new NoBuilder
- def apply() = new NoBuilder
- }
-}
diff --git a/src/library/scala/collection/IndexedSeqViewLike.scala b/src/library/scala/collection/IndexedSeqViewLike.scala
deleted file mode 100644
index 07f63ad2b0..0000000000
--- a/src/library/scala/collection/IndexedSeqViewLike.scala
+++ /dev/null
@@ -1,113 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: Seq.scala 16092 2008-09-12 10:37:06Z nielsen $
-
-
-package scala.collection
-
-import generic._
-import TraversableView.NoBuilder
-
-/** A template trait for a non-strict view of a IndexedSeq.
- *
- * @author Sean McDirmid
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
- */
-trait IndexedSeqViewLike[+A,
- +Coll,
- +This <: IndexedSeqView[A, Coll] with IndexedSeqViewLike[A, Coll, This]]
- extends IndexedSeq[A] with IndexedSeqLike[A, This] with SeqView[A, Coll] with SeqViewLike[A, Coll, This]
-{ self =>
-
- trait Transformed[+B] extends IndexedSeqView[B, Coll] with super.Transformed[B]
-
- trait Sliced extends Transformed[A] with super.Sliced {
- /** Override to use IndexedSeq's foreach; todo: see whether this is really faster */
- override def foreach[U](f: A => U) = super[Transformed].foreach(f)
- }
-
- trait Mapped[B] extends Transformed[B] with super.Mapped[B] {
- override def foreach[U](f: B => U) = super[Transformed].foreach(f)
- }
-
- trait FlatMapped[B] extends Transformed[B] with super.FlatMapped[B] {
- override def foreach[U](f: B => U) = super[Transformed].foreach(f)
- }
-
- trait Appended[B >: A] extends Transformed[B] with super.Appended[B] {
- override def foreach[U](f: B => U) = super[Transformed].foreach(f)
- }
-
- trait Filtered extends Transformed[A] with super.Filtered {
- override def foreach[U](f: A => U) = super[Transformed].foreach(f)
- }
-
- trait TakenWhile extends Transformed[A] with super.TakenWhile {
- override def foreach[U](f: A => U) = super[Transformed].foreach(f)
- }
-
- trait DroppedWhile extends Transformed[A] with super.DroppedWhile {
- override def foreach[U](f: A => U) = super[Transformed].foreach(f)
- }
-
- trait Reversed extends Transformed[A] with super.Reversed {
- override def foreach[U](f: A => U) = super[Transformed].foreach(f)
- }
-
- trait Patched[B >: A] extends Transformed[B] with super.Patched[B] {
- override def foreach[U](f: B => U) = super[Transformed].foreach(f)
- }
-
- trait Zipped[B] extends Transformed[(A, B)] {
- protected[this] val other: Iterable[B]
- /** Have to be careful here - other may be an infinite sequence. */
- def length =
- if (other.hasDefiniteSize) self.length min other.size
- else other take self.length size
-
- def apply(idx: Int): (A, B) = (self.apply(idx), other.iterator drop idx next)
- override def stringPrefix = self.stringPrefix+"Z"
- }
-
- trait ZippedAll[A1 >: A, B] extends Transformed[(A1, B)] {
- protected[this] val other: Iterable[B]
- val thisElem: A1
- val thatElem: B
- override def iterator: Iterator[(A1, B)] =
- self.iterator.zipAll(other.iterator, thisElem, thatElem)
-
- def length = self.length max other.size
- def apply(idx: Int): (A1, B) = {
- val z1 = if (idx < self.length) self.apply(idx) else thisElem
- val z2 = if (idx < other.size) other drop idx head else thatElem
- (z1, z2)
- }
- override def stringPrefix = self.stringPrefix+"Z"
- }
-
- /** Boilerplate method, to override in each subclass
- * This method could be eliminated if Scala had virtual classes
- */
- protected override def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new Appended[B] { val rest = that }
- protected override def newMapped[B](f: A => B): Transformed[B] = new Mapped[B] { val mapping = f }
- protected override def newFlatMapped[B](f: A => Traversable[B]): Transformed[B] = new FlatMapped[B] { val mapping = f }
- protected override def newFiltered(p: A => Boolean): Transformed[A] = new Filtered { val pred = p }
- protected override def newSliced(_from: Int, _until: Int): Transformed[A] = new Sliced { val from = _from; val until = _until }
- protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new DroppedWhile { val pred = p }
- protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new TakenWhile { val pred = p }
- protected override def newZipped[B](that: Iterable[B]): Transformed[(A, B)] = new Zipped[B] { val other = that }
- protected override def newZippedAll[A1 >: A, B](that: Iterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new ZippedAll[A1, B] { val other = that; val thisElem = _thisElem; val thatElem = _thatElem }
- protected override def newReversed: Transformed[A] = new Reversed { }
- protected override def newPatched[B >: A](_from: Int, _patch: Seq[B], _replaced: Int): Transformed[B] = new Patched[B] {
- val from = _from; val patch = _patch; val replaced = _replaced
- }
- override def stringPrefix = "IndexedSeqView"
-}
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index 8446988821..6f88c72ffd 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -69,9 +69,6 @@ self =>
*/
def iterator: Iterator[A]
- @deprecated("use `iterator' instead")
- def elements = iterator
-
/** Applies a function `f` to all elements of this $coll.
*
* Note: this method underlies the implementation of most other bulk operations.
@@ -189,7 +186,7 @@ self =>
b.result
}
- /** Selects all elements except first ''n'' ones.
+ /** Selects all elements except last ''n'' ones.
* $orderDependent
*
* @param n The number of elements to take
@@ -367,6 +364,9 @@ self =>
override /*TraversableLike*/ def view(from: Int, until: Int) = view.slice(from, until)
+ @deprecated("use `iterator' instead")
+ def elements = iterator
+
@deprecated("use `head' instead") def first: A = head
/** `None` if iterable is empty.
diff --git a/src/library/scala/collection/IterableProxyLike.scala b/src/library/scala/collection/IterableProxyLike.scala
index 4400237486..fe148339b0 100644
--- a/src/library/scala/collection/IterableProxyLike.scala
+++ b/src/library/scala/collection/IterableProxyLike.scala
@@ -24,24 +24,20 @@ import mutable.Buffer
* @version 2.8
* @since 2.8
*/
-trait IterableProxyLike[+A, +This <: IterableLike[A, This] with Iterable[A]]
- extends IterableLike[A, This]
- with TraversableProxyLike[A, This]
+trait IterableProxyLike[+A, +Repr <: IterableLike[A, Repr] with Iterable[A]]
+ extends IterableLike[A, Repr]
+ with TraversableProxyLike[A, Repr]
{
override def iterator: Iterator[A] = self.iterator
- override def foreach[U](f: A => U): Unit = self.foreach(f)
- override def isEmpty: Boolean = self.isEmpty
- override def foldRight[B](z: B)(op: (A, B) => B): B = self.foldRight(z)(op)
- override def reduceRight[B >: A](op: (A, B) => B): B = self.reduceRight(op)
- override def toIterable: Iterable[A] = self.toIterable
- override def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[This, (A1, B), That]): That = self.zip[A1, B, That](that)(bf)
- override def zipAll[B, A1 >: A, That](that: Iterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[This, (A1, B), That]): That = self.zipAll(that, thisElem, thatElem)(bf)
- override def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[This, (A1, Int), That]): That = self.zipWithIndex(bf)
- override def head: A = self.head
- override def takeRight(n: Int): This = self.takeRight(n)
- override def dropRight(n: Int): This = self.dropRight(n)
+ override def grouped(size: Int): Iterator[Repr] = self.grouped(size)
+ override def sliding[B >: A](size: Int): Iterator[Repr] = self.sliding(size)
+ override def sliding[B >: A](size: Int, step: Int): Iterator[Repr] = self.sliding(size, step)
+ override def takeRight(n: Int): Repr = self.takeRight(n)
+ override def dropRight(n: Int): Repr = self.dropRight(n)
+ override def zip[A1 >: A, B, That](that: Iterable[B])(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = self.zip[A1, B, That](that)(bf)
+ override def zipAll[B, A1 >: A, That](that: Iterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = self.zipAll(that, thisElem, thatElem)(bf)
+ override def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = self.zipWithIndex(bf)
override def sameElements[B >: A](that: Iterable[B]): Boolean = self.sameElements(that)
- override def toStream: Stream[A] = self.toStream
override def view = self.view
override def view(from: Int, until: Int) = self.view(from, until)
}
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index 27323294c4..09f084d92c 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -29,6 +29,10 @@ extends Iterable[A] with IterableLike[A, This] with TraversableView[A, Coll] wit
trait Transformed[+B] extends IterableView[B, Coll] with super.Transformed[B]
+ trait Forced[B] extends Transformed[B] with super.Forced[B] {
+ override def iterator = forced.iterator
+ }
+
trait Sliced extends Transformed[A] with super.Sliced {
override def iterator = self.iterator slice (from, until)
}
@@ -96,6 +100,7 @@ extends Iterable[A] with IterableLike[A, This] with TraversableView[A, Coll] wit
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
+ protected override def newForced[B](xs: => Seq[B]): Transformed[B] = new Forced[B] { val forced = xs }
protected override def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new Appended[B] { val rest = that }
protected override def newMapped[B](f: A => B): Transformed[B] = new Mapped[B] { val mapping = f }
protected override def newFlatMapped[B](f: A => Traversable[B]): Transformed[B] = new FlatMapped[B] { val mapping = f }
@@ -104,5 +109,11 @@ extends Iterable[A] with IterableLike[A, This] with TraversableView[A, Coll] wit
protected override def newDroppedWhile(p: A => Boolean): Transformed[A] = new DroppedWhile { val pred = p }
protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new TakenWhile { val pred = p }
+ override def grouped(size: Int): Iterator[This] =
+ self.iterator.grouped(size).map(xs => newForced(xs).asInstanceOf[This])
+
+ override def sliding[B >: A](size: Int, step: Int): Iterator[This] =
+ self.iterator.sliding(size, step).map(xs => newForced(xs).asInstanceOf[This])
+
override def stringPrefix = "IterableView"
}
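The `grouped`/`sliding` overrides above delegate to the underlying iterator and wrap each chunk back into a view via `newForced`. The iterator-level behaviour they build on (a hedged sketch):

    List(1, 2, 3, 4, 5).iterator.grouped(2).toList
    // List(List(1, 2), List(3, 4), List(5))

    List(1, 2, 3, 4).iterator.sliding(2).toList
    // List(List(1, 2), List(2, 3), List(3, 4))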
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index de0ec5275f..701b24f300 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -13,7 +13,7 @@ package scala.collection
import mutable.{Buffer, ArrayBuffer, ListBuffer, StringBuilder}
import immutable.{List, Stream}
-import annotation.{ tailrec }
+import annotation.{ tailrec, migration }
/** The `Iterator` object provides various functions for
* creating specialized iterators.
@@ -135,7 +135,7 @@ object Iterator {
}
/** Creates an infinite-length iterator returning the results of evaluating
- * an expression. The epxression is recomputed for every element.
+ * an expression. The expression is recomputed for every element.
*
* @param elem the element computation.
* @return the iterator containing an infinite number of results of evaluating `elem`.
@@ -145,6 +145,27 @@ object Iterator {
def next = elem
}
+ /** With the advent of TraversableOnce, it can be useful to have a builder
+ * for Iterators so they can be treated uniformly along with the collections.
+ * See scala.util.Random.shuffle for an example.
+ */
+ class IteratorCanBuildFrom[A] extends generic.CanBuildFrom[Iterator[A], A, Iterator[A]] {
+ def newIterator = new ArrayBuffer[A] mapResult (_.iterator)
+
+ /** Creates a new builder on request of a collection.
+ * @param from the collection requesting the builder to be created.
+ * @return a new builder that produces an `Iterator[A]`.
+ */
+ def apply(from: Iterator[A]) = newIterator
+
+ /** Creates a new builder from scratch
+ * @return a fresh builder that produces an `Iterator[A]`.
+ */
+ def apply() = newIterator
+ }
+
+ implicit def iteratorCanBuildFrom[T]: IteratorCanBuildFrom[T] = new IteratorCanBuildFrom[T]
+
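To see what `IteratorCanBuildFrom` buys, consider a generic helper parameterized by `CanBuildFrom` (the `doubled` helper below is hypothetical); with `iteratorCanBuildFrom` in implicit scope it can target `Iterator` like any other collection:

    import scala.collection.generic.CanBuildFrom

    // hypothetical helper: repeats each element twice, result type chosen by the implicit CanBuildFrom
    def doubled[A, To](it: Iterator[A])(implicit cbf: CanBuildFrom[Iterator[A], A, To]): To = {
      val b = cbf(it)
      for (x <- it) { b += x; b += x }
      b.result
    }

    doubled(Iterator(1, 2)).toList    // List(1, 1, 2, 2)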
/** A wrapper class for the `flatten` method that is added to
* class `Iterator` with implicit conversion
* @see iteratorIteratorWrapper.
@@ -233,7 +254,7 @@ object Iterator {
def next(): Int = { val j = i; i = step(i); j }
}
- /** Create an iterator that is the concantenation of all iterators
+ /** Create an iterator that is the concatenation of all iterators
* returned by a given iterator of iterators.
* @param its The iterator which returns on each call to next
* a new iterator whose elements are to be concatenated to the result.
@@ -265,7 +286,8 @@ import Iterator.empty
* @define mayNotTerminateInf
* Note: may not terminate for infinite iterators.
*/
-trait Iterator[+A] { self =>
+trait Iterator[+A] extends TraversableOnce[A] {
+ self =>
/** Tests whether this iterator can provide another element.
* @return `true` if a subsequent call to `next` will yield an element,
@@ -279,6 +301,22 @@ trait Iterator[+A] { self =>
*/
def next(): A
+ /** Tests whether this iterator is empty.
+ * @return `true` if hasNext is false, `false` otherwise.
+ */
+ def isEmpty: Boolean = !hasNext
+
+ /** Tests whether this Iterator can be repeatedly traversed.
+ * @return `false`
+ */
+ def isTraversableAgain = false
+
+ /** Tests whether this Iterator has a known size.
+ *
+ * @return `true` for empty Iterators, `false` otherwise.
+ */
+ def hasDefiniteSize = isEmpty
+
/** Selects first ''n'' values of this iterator.
* @param n the number of values to take
* @return an iterator producing only the first `n` values of this iterator, or else the
@@ -319,8 +357,8 @@ trait Iterator[+A] { self =>
/** Creates a new iterator that maps all produced values of this iterator
* to new values using a transformation function.
* @param f the transformation function
- * @return a new iterator which transformes every value produced by this
- * iterator by applying the functon `f` to it.
+ * @return a new iterator which transforms every value produced by this
+ * iterator by applying the function `f` to it.
*/
def map[B](f: A => B): Iterator[B] = new Iterator[B] {
def hasNext = self.hasNext
@@ -328,7 +366,7 @@ trait Iterator[+A] { self =>
}
/** Concatenates this iterator with another.
- * @that the other iterator
+ * @param that the other iterator
* @return a new iterator that first yields the values produced by this
* iterator followed by the values produced by iterator `that`.
* @usecase def ++(that: => Iterator[A]): Iterator[A]
@@ -411,7 +449,11 @@ trait Iterator[+A] { self =>
* @return a new iterator which yields each value `x` produced by this iterator for
* which `pf` is defined the image `pf(x)`.
*/
- def partialMap[B](pf: PartialFunction[A, B]): Iterator[B] = {
+ @migration(2, 8,
+ "This collect implementation bears no relationship to the one before 2.8.\n"+
+ "The previous behavior can be reproduced with toSeq."
+ )
+ def collect[B](pf: PartialFunction[A, B]): Iterator[B] = {
val self = buffered
new Iterator[B] {
private def skip() = while (self.hasNext && !pf.isDefinedAt(self.head)) self.next()
@@ -667,12 +709,12 @@ trait Iterator[+A] { self =>
if (found) i else -1
}
- /** Returns the index of the first occurence of the specified
+ /** Returns the index of the first occurrence of the specified
* object in this iterable object.
* $mayNotTerminateInf
*
* @param elem element to search for.
- * @return the index of the first occurence of `elem` in the values produced by this iterator,
+ * @return the index of the first occurrence of `elem` in the values produced by this iterator,
* or -1 if such an element does not exist until the end of the iterator is reached.
*/
def indexOf[B >: A](elem: B): Int = {
@@ -688,131 +730,6 @@ trait Iterator[+A] { self =>
if (found) i else -1
}
- /** Applies a binary operator to a start value and all values produced by this iterator, going left to right.
- * $willNotTerminateInf
- * @param z the start value.
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive values produced by this iterator
- * going left to right with the start value `z` on the left:
- * {{{
- * op(...op(z, x,,1,,), x,,2,,, ..., x,,n,,)
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the values produced by this iterator.
- */
- def foldLeft[B](z: B)(op: (B, A) => B): B = {
- var acc = z
- while (hasNext) acc = op(acc, next())
- acc
- }
-
- /** Applies a binary operator to all values produced by this iterator and a start value, going right to left.
- * $willNotTerminateInf
- * @param z the start value.
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive values produced by this iterator
- * going right to left with the start value `z` on the right:
- * {{{
- * op(x,,1,,, op(x,,2,,, ... op(x,,n,,, z)...))
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the values produced by this iterator.
- */
- def foldRight[B](z: B)(op: (A, B) => B): B =
- if (hasNext) op(next(), foldRight(z)(op)) else z
-
- /** Applies a binary operator to a start value and all values produced by this iterator, going left to right.
- *
- * Note: `/:` is alternate syntax for `foldLeft`; `z /: it` is the same as `it foldLeft z`.
- * $willNotTerminateInf
- *
- * @param z the start value.
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive values produced by this iterator
- * going left to right with the start value `z` on the left:
- * {{{
- * op(...op(z, x,,1,,), x,,2,,, ..., x,,n,,)
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the values produced by this iterator.
- */
- def /:[B](z: B)(op: (B, A) => B): B = foldLeft(z)(op)
-
- /** Applies a binary operator to all values produced by this iterator and a start value, going right to left.
- * Note: `:\` is alternate syntax for `foldRight`; `it :\ z` is the same as `it foldRight z`.
- * $willNotTerminateInf
- * @param z the start value.
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive values produced by this iterator
- * going right to left with the start value `z` on the right:
- * {{{
- * op(x,,1,,, op(x,,2,,, ... op(x,,n,,, z)...))
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the values produced by this iterator.
- */
- def :\[B](z: B)(op: (A, B) => B): B = foldRight(z)(op)
-
- /** Applies a binary operator to all values produced by this iterator, going left to right.
- * $willNotTerminateInf
- *
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive values produced by this iterator
- * going left to right:
- * {{{
- * op(...(op(x,,1,,, x,,2,,), ... ) , x,,n,,)
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the values produced by this iterator.
- * @throws `UnsupportedOperationException` if this iterator is empty.
- */
- def reduceLeft[B >: A](op: (B, A) => B): B = {
- if (hasNext) foldLeft[B](next())(op)
- else throw new UnsupportedOperationException("empty.reduceLeft")
- }
-
- /** Applies a binary operator to all values produced by this iterator, going right to left.
- * $willNotTerminateInf
- *
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive values produced by this iterator
- * going right to left:
- * {{{
- * op(x,,1,,, op(x,,2,,, ..., op(x,,n-1,,, x,,n,,)...))
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the values produced by this iterator.
- * @throws `UnsupportedOperationException` if this iterator is empty.
- */
- def reduceRight[B >: A](op: (A, B) => B): B = {
- if (hasNext) foldRight[B](next())(op)
- else throw new UnsupportedOperationException("empty.reduceRight")
- }
-
- /** Optionally applies a binary operator to all values produced by this iterator, going left to right.
- * $willNotTerminateInf
- *
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return an option value containing the result of `reduceLeft(op)` is this iterator is nonempty,
- * `None` otherwise.
- */
- def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = {
- if (!hasNext) None else Some(reduceLeft(op))
- }
-
- /** Optionally applies a binary operator to all values produced by this iterator, going right to left.
- * $willNotTerminateInf
- *
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return an option value containing the result of `reduceRight(op)` is this iterator is nonempty,
- * `None` otherwise.
- */
- def reduceRightOption[B >: A](op: (A, B) => B): Option[B] = {
- if (!hasNext) None else Some(reduceRight(op))
- }
-
/** Creates a buffered iterator from this iterator.
* @see BufferedIterator
* @return a buffered iterator producing the same values as this iterator.
@@ -937,6 +854,8 @@ trait Iterator[+A] { self =>
if (!filled)
fill()
+ if (!filled)
+ throw new NoSuchElementException("next on empty iterator")
filled = false
buffer.toList
}
@@ -985,16 +904,11 @@ trait Iterator[+A] { self =>
*
* Note: The iterator is at its end after this method returns.
*/
- def length: Int = {
- var i = 0
- while (hasNext) {
- next(); i += 1
- }
- i
- }
+ def length: Int = this.size
/** Creates two new iterators that both iterate over the same elements
- * as this iterator (in the same order).
+ * as this iterator (in the same order). The duplicate iterators are
+ * considered equal if they are positioned at the same element.
*
* @return a pair of iterators
*/
@@ -1013,6 +927,14 @@ trait Iterator[+A] { self =>
e
} else gap.dequeue
}
+ // to verify partnerhood we use reference equality on gap because
+ // type testing does not discriminate based on origin.
+ private def compareGap(queue: scala.collection.mutable.Queue[A]) = gap eq queue
+ override def hashCode = gap.hashCode
+ override def equals(other: Any) = other match {
+ case x: Partner => x.compareGap(gap) && gap.isEmpty
+ case _ => super.equals(other)
+ }
}
(new Partner, new Partner)
}
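In use, `duplicate` returns two independent iterators over the same elements, so advancing one does not consume the other (a hedged sketch):

    val (a, b) = Iterator(1, 2, 3).duplicate
    a.next()     // 1
    b.toList     // List(1, 2, 3), b is unaffected by advancing a
    a.toList     // List(2, 3)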
@@ -1063,75 +985,7 @@ trait Iterator[+A] { self =>
}
}
- /** Copies values produced by this iterator to an array.
- * Fills the given array `xs` with values produced by this iterator, after skipping `start` values.
- * Copying will stop once either the end of the current iterator is reached,
- * or the end of the array is reached.
- *
- * $willNotTerminateInf
- *
- * @param xs the array to fill.
- * @param start the starting index.
- * @tparam B the type of the elements of the array.
- *
- * @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit
- */
- def copyToArray[B >: A](xs: Array[B], start: Int): Unit =
- copyToArray(xs, start, xs.length - start)
-
- /** Copies values produced by this iterator to an array.
- * Fills the given array `xs` with values produced by this iterator.
- * Copying will stop once either the end of the current iterator is reached,
- * or the end of the array is reached.
- *
- * $willNotTerminateInf
- *
- * @param xs the array to fill.
- * @tparam B the type of the elements of the array.
- *
- * @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit
- */
- def copyToArray[B >: A](xs: Array[B]): Unit = copyToArray(xs, 0, xs.length)
-
- /** Copies all values produced by this iterator to a buffer.
- * $willNotTerminateInf
- * @param dest The buffer to which elements are copied
- */
- def copyToBuffer[B >: A](dest: Buffer[B]) {
- while (hasNext) dest += next()
- }
-
- /** Traverses this iterator and returns all produced values in a list.
- * $willNotTerminateInf
- *
- * @return a list which contains all values produced by this iterator.
- */
- def toList: List[A] = {
- val res = new ListBuffer[A]
- while (hasNext) res += next
- res.toList
- }
-
- /** Lazily wraps a Stream around this iterator so its values are memoized.
- *
- * @return a Stream which can repeatedly produce all the values
- * produced by this iterator.
- */
- def toStream: Stream[A] =
- if (hasNext) Stream.cons(next, toStream) else Stream.empty
-
- /** Traverses this iterator and returns all produced values in a sequence.
- * $willNotTerminateInf
- *
- * @return a list which contains all values produced by this iterator.
- */
- def toSeq: Seq[A] = {
- val buffer = new ArrayBuffer[A]
- this copyToBuffer buffer
- buffer
- }
-
- /** Tests if another iterator produces the same valeus as this one.
+ /** Tests if another iterator produces the same values as this one.
* $willNotTerminateInf
* @param that the other iterator
* @return `true`, if both iterators produce the same elements in the same order, `false` otherwise.
@@ -1144,76 +998,8 @@ trait Iterator[+A] { self =>
!hasNext && !that.hasNext
}
- /** Displays all values produced by this iterator in a string using start, end, and separator strings.
- *
- * @param start the starting string.
- * @param sep the separator string.
- * @param end the ending string.
- * @return a string representation of this iterator. The resulting string
- * begins with the string `start` and ends with the string
- * `end`. Inside, the string representations (w.r.t. the method `toString`)
- * of all values produced by this iterator are separated by the string `sep`.
- */
- def mkString(start: String, sep: String, end: String): String = {
- val buf = new StringBuilder
- addString(buf, start, sep, end).toString
- }
-
- /** Displays all values produced by this iterator in a string using a separator string.
- *
- * @param sep the separator string.
- * @return a string representation of this iterator. In the resulting string
- * the string representations (w.r.t. the method `toString`)
- * of all values produced by this iterator are separated by the string `sep`.
- */
- def mkString(sep: String): String = mkString("", sep, "")
-
- /** Displays all values produced by this iterator in a string.
- * @return a string representation of this iterator. In the resulting string
- * the string representations (w.r.t. the method `toString`)
- * of all values produced by this iterator follow each other without any separator string.
- */
- def mkString: String = mkString("")
-
- /** Appends all values produced by this iterator to a string builder using start, end, and separator strings.
- * The written text begins with the string `start` and ends with the string
- * `end`. Inside, the string representations (w.r.t. the method `toString`)
- * of all values produced by this iterator are separated by the string `sep`.
- *
- * @param b the string builder to which elements are appended.
- * @param start the starting string.
- * @param sep the separator string.
- * @param end the ending string.
- * @return the string builder `b` to which elements were appended.
- */
- def addString(buf: StringBuilder, start: String, sep: String, end: String): StringBuilder = {
- buf.append(start)
- val elems = this
- if (elems.hasNext) buf.append(elems.next)
- while (elems.hasNext) {
- buf.append(sep); buf.append(elems.next)
- }
- buf.append(end)
- }
-
- /** Appends all values produced by this iterator to a string builder using a separator string.
- * The written text consists of the string representations (w.r.t. the method `toString`)
- * of all values produced by this iterator, separated by the string `sep`.
- *
- * @param b the string builder to which elements are appended.
- * @param sep the separator string.
- * @return the string builder `b` to which elements were appended.
- */
- def addString(buf: StringBuilder, sep: String): StringBuilder = addString(buf, "", sep, "")
-
- /** Appends all values produced by this iterator to a string builder.
- * The written text consists of the string representations (w.r.t. the method `toString`)
- * of all values produced by this iterator without any separator string.
- *
- * @param b the string builder to which elements are appended.
- * @return the string builder `b` to which elements were appended.
- */
- def addString(buf: StringBuilder): StringBuilder = addString(buf, "", "", "")
+ def toTraversable: Traversable[A] = toStream
+ def toIterator: Iterator[A] = self
/** Converts this iterator to a string.
* @return `"empty iterator"` or `"non-empty iterator"`, depending on whether or not the iterator is empty.
@@ -1230,13 +1016,6 @@ trait Iterator[+A] { self =>
@deprecated("use `indexWhere` instead")
def findIndexOf(p: A => Boolean): Int = indexWhere(p)
- /** Collect elements into a seq.
- *
- * @return a sequence which enumerates all elements of this iterator.
- */
- @deprecated("use toSeq instead")
- def collect: Seq[A] = toSeq
-
/** Returns a counted iterator from this iterator.
*/
@deprecated("use zipWithIndex in Iterator")
@@ -1254,7 +1033,7 @@ trait Iterator[+A] { self =>
* @param xs the array to fill.
* @param start the starting index.
* @param sz the maximum number of elements to be read.
- * @pre the array must be large enough to hold `sz` elements.
+ * @note the array must be large enough to hold `sz` elements.
*/
@deprecated("use copyToArray instead")
def readInto[B >: A](xs: Array[B], start: Int, sz: Int) {
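The net effect of this Iterator rewrite: the bulk operations deleted above (folds, reduces, mkString, toList, copyToArray and friends) are now inherited from `TraversableOnce`, and `partialMap` has become `collect`. Call sites keep working; a hedged sketch:

    Iterator(1, 2, 3, 4).foldLeft(0)(_ + _)        // 10, inherited from TraversableOnce
    Iterator("a", "b", "c").mkString(", ")         // "a, b, c"

    Iterator(1, 2, 3).collect { case n if n % 2 == 1 => n * 10 }.toList
    // List(10, 30), formerly spelled partialMap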
diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala
index 7af138067b..00f2d745af 100644
--- a/src/library/scala/collection/JavaConversions.scala
+++ b/src/library/scala/collection/JavaConversions.scala
@@ -40,7 +40,7 @@ package scala.collection
* <p>
* Note that no conversion is provided from <code>scala.collection.immutable.List</code>
* to <code>java.util.List</code>. Instead it is convertible to an immutable
- * <code>java.util.Collection</code> which provides size and interation
+ * <code>java.util.Collection</code> which provides size and iteration
* capabilities, but not access by index as would be provided by
* <code>java.util.List</code>.<br/>
* This is intentional: in combination the implementation of
@@ -497,9 +497,10 @@ object JavaConversions {
case class MutableMapWrapper[A, B](underlying : mutable.Map[A, B])(m : ClassManifest[A])
extends MutableMapWrapperLike[A, B](underlying)(m)
- abstract class JMapWrapperLike[A, B, +Repr <: mutable.MapLike[A, B, Repr] with mutable.Map[A, B]]
- (underlying: ju.Map[A, B])
+ trait JMapWrapperLike[A, B, +Repr <: mutable.MapLike[A, B, Repr] with mutable.Map[A, B]]
extends mutable.Map[A, B] with mutable.MapLike[A, B, Repr] {
+ def underlying: ju.Map[A, B]
+
override def size = underlying.size
def get(k : A) = {
@@ -538,8 +539,8 @@ object JavaConversions {
override def empty: Repr = null.asInstanceOf[Repr]
}
- case class JMapWrapper[A, B](underlying : ju.Map[A, B])
- extends JMapWrapperLike[A, B, JMapWrapper[A, B]](underlying) {
+ case class JMapWrapper[A, B](val underlying : ju.Map[A, B])
+ extends JMapWrapperLike[A, B, JMapWrapper[A, B]] {
override def empty = JMapWrapper(new ju.HashMap[A, B])
}
@@ -584,8 +585,8 @@ object JavaConversions {
}
- case class JConcurrentMapWrapper[A, B](underlying: juc.ConcurrentMap[A, B])
- extends JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]](underlying) with mutable.ConcurrentMap[A, B] {
+ case class JConcurrentMapWrapper[A, B](val underlying: juc.ConcurrentMap[A, B])
+ extends JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with mutable.ConcurrentMap[A, B] {
override def get(k: A) = {
val v = underlying.get(k)
if (v != null) Some(v)
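The wrapper refactoring above (abstract classes replaced by traits with an abstract `underlying`) does not change the user-facing conversions: with `JavaConversions` in scope a `java.util.Map` is usable as a `mutable.Map`, and both views share the same state. A hedged sketch, assuming the implicit `ju.Map` to `mutable.Map` conversion is in scope:

    import scala.collection.JavaConversions._
    import java.util.{ HashMap => JHashMap }

    val jmap = new JHashMap[String, Int]()
    val smap: scala.collection.mutable.Map[String, Int] = jmap   // implicitly wrapped (JMapWrapper)
    smap("one") = 1
    jmap.get("one")                                              // 1, same underlying map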
diff --git a/src/library/scala/collection/LinearSeq.scala b/src/library/scala/collection/LinearSeq.scala
index 5862741530..1afb2fdb7f 100644
--- a/src/library/scala/collection/LinearSeq.scala
+++ b/src/library/scala/collection/LinearSeq.scala
@@ -14,18 +14,10 @@ package scala.collection
import generic._
import mutable.Builder
-/** <p>
- * Class <code>Linear[A]</code> represents linear sequences of elements.
- * For such sequences <code>isEmpty</code>, <code>head</code> and
- * <code>tail</code> are guaranteed to be efficient constant time (or near so)
- * operations.<br/>
- * It does not add any methods to <code>Seq</code> but overrides several
- * methods with optimized implementations.
- * </p>
+/** A base trait for linear sequences.
+ * $linearSeqInfo
*
* @author Martin Odersky
- * @author Matthias Zenger
- * @version 1.0, 16/07/2003
* @since 2.8
*/
trait LinearSeq[+A] extends Seq[A]
diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala
index 9bed88967c..1c99d4a3d9 100644
--- a/src/library/scala/collection/LinearSeqLike.scala
+++ b/src/library/scala/collection/LinearSeqLike.scala
@@ -19,12 +19,16 @@ import scala.util.control.Breaks._
/** A template trait for linear sequences of type `LinearSeq[A]`.
*
* $linearSeqInfo
+ *
+ * This trait just implements `iterator`
+ * in terms of `isEmpty`, `head`, and `tail`.
+ * However, see `LinearSeqOptimized` for an implementation trait that overrides operations
+ * to make them run faster under the assumption of fast linear access with `head` and `tail`.
+ *
* @author Martin Odersky
- * @author Matthias Zenger
- * @version 1.0, 16/07/2003
+ * @version 2.8
* @since 2.8
*
- * @define Coll LinearSeq
* @define linearSeqInfo
* Linear sequences are defined in terms of three abstract methods, which are assumed
* to have efficient implementations. These are:
@@ -35,9 +39,8 @@ import scala.util.control.Breaks._
* }}}
* Here, `A` is the type of the sequence elements and `Repr` is the type of the sequence itself.
*
- * Linear sequences do not define any new methods wrt `Seq`. However, abstract `Seq` methods
- * are defined in terms of `isEmpty`, `head`, and `tail`, and several other methods are overridden
- * with optimized implementations.
+ * Linear sequences do not add any new methods to `Seq`, but promise efficient implementations
+ * of linear access patterns.
*
* @tparam A the element type of the $coll
* @tparam Repr the type of the actual $coll containing the elements.
@@ -47,38 +50,6 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr
override protected[this] def thisCollection: LinearSeq[A] = this.asInstanceOf[LinearSeq[A]]
override protected[this] def toCollection(repr: Repr): LinearSeq[A] = repr.asInstanceOf[LinearSeq[A]]
- def isEmpty: Boolean
-
- def head: A
-
- def tail: Repr
-
- /** The length of the $coll.
- *
- * $willNotTerminateInf
- *
- * Note: the execution of `length` may take time proportial to the length of the sequence.
- */
- def length: Int = {
- var these = self
- var len = 0
- while (!these.isEmpty) {
- len += 1
- these = these.tail
- }
- len
- }
-
- /** Selects an element by its index in the $coll.
- * Note: the execution of `apply` may take time proportial to the index value.
- * @throws `IndexOutOfBoundsEsxception` if `idx` does not satisfy `0 <= idx < length`.
- */
- def apply(n: Int): A = {
- val rest = drop(n)
- if (n < 0 || rest.isEmpty) throw new IndexOutOfBoundsException
- rest.head
- }
-
override /*IterableLike*/
def iterator: Iterator[A] = new Iterator[A] {
var these = self
@@ -89,239 +60,4 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr
} else Iterator.empty.next
override def toList: List[A] = these.toList
}
-
- override /*IterableLike*/
- def foreach[B](f: A => B) {
- var these = this
- while (!these.isEmpty) {
- f(these.head)
- these = these.tail
- }
- }
-
-
- override /*IterableLike*/
- def forall(p: A => Boolean): Boolean = {
- var these = this
- while (!these.isEmpty) {
- if (!p(these.head)) return false
- these = these.tail
- }
- true
- }
-
- override /*IterableLike*/
- def exists(p: A => Boolean): Boolean = {
- var these = this
- while (!these.isEmpty) {
- if (p(these.head)) return true
- these = these.tail
- }
- false
- }
-
- override /*TraversableLike*/
- def count(p: A => Boolean): Int = {
- var these = this
- var cnt = 0
- while (!these.isEmpty) {
- if (p(these.head)) cnt += 1
- these = these.tail
- }
- cnt
- }
-
- override /*IterableLike*/
- def find(p: A => Boolean): Option[A] = {
- var these = this
- while (!these.isEmpty) {
- if (p(these.head)) return Some(these.head)
- these = these.tail
- }
- None
- }
-/*
- override def mapFind[B](f: A => Option[B]): Option[B] = {
- var res: Option[B] = None
- var these = this
- while (res.isEmpty && !these.isEmpty) {
- res = f(these.head)
- these = these.tail
- }
- res
- }
-*/
- override /*TraversableLike*/
- def foldLeft[B](z: B)(f: (B, A) => B): B = {
- var acc = z
- var these = this
- while (!these.isEmpty) {
- acc = f(acc, these.head)
- these = these.tail
- }
- acc
- }
-
- override /*IterableLike*/
- def foldRight[B](z: B)(f: (A, B) => B): B =
- if (this.isEmpty) z
- else f(head, tail.foldRight(z)(f))
-
- override /*TraversableLike*/
- def reduceLeft[B >: A](f: (B, A) => B): B =
- if (isEmpty) throw new UnsupportedOperationException("empty.reduceLeft")
- else tail.foldLeft[B](head)(f)
-
- override /*IterableLike*/
- def reduceRight[B >: A](op: (A, B) => B): B =
- if (isEmpty) throw new UnsupportedOperationException("Nil.reduceRight")
- else if (tail.isEmpty) head
- else op(head, tail.reduceRight(op))
-
- override /*TraversableLike*/
- def last: A = {
- if (isEmpty) throw new NoSuchElementException
- var these = this
- var nx = these.tail
- while (!nx.isEmpty) {
- these = nx
- nx = nx.tail
- }
- these.head
- }
-
- override /*IterableLike*/
- def take(n: Int): Repr = {
- val b = newBuilder
- var i = 0
- var these = repr
- while (!these.isEmpty && i < n) {
- i += 1
- b += these.head
- these = these.tail
- }
- b.result
- }
-
- override /*TraversableLike*/
- def drop(n: Int): Repr = {
- var these: Repr = repr
- var count = n
- while (!these.isEmpty && count > 0) {
- these = these.tail
- count -= 1
- }
- these
- }
-
- override /*IterableLike*/
- def dropRight(n: Int): Repr = {
- val b = newBuilder
- var these = this
- var lead = this drop n
- while (!lead.isEmpty) {
- b += these.head
- these = these.tail
- lead = lead.tail
- }
- b.result
- }
-
- override /*IterableLike*/
- def slice(from: Int, until: Int): Repr = {
- val b = newBuilder
- var i = from
- var these = this drop from
- while (i < until && !these.isEmpty) {
- b += these.head
- these = these.tail
- i += 1
- }
- b.result
- }
-
- override /*IterableLike*/
- def takeWhile(p: A => Boolean): Repr = {
- val b = newBuilder
- var these = this
- while (!these.isEmpty && p(these.head)) {
- b += these.head
- these = these.tail
- }
- b.result
- }
-
- override /*TraversableLike*/
- def span(p: A => Boolean): (Repr, Repr) = {
- var these: Repr = repr
- val b = newBuilder
- while (!these.isEmpty && p(these.head)) {
- b += these.head
- these = these.tail
- }
- (b.result, these)
- }
-
- override /*IterableLike*/
- def sameElements[B >: A](that: Iterable[B]): Boolean = that match {
- case that1: LinearSeq[_] =>
- var these = this
- var those = that1
- while (!these.isEmpty && !those.isEmpty && these.head == those.head) {
- these = these.tail
- those = those.tail
- }
- these.isEmpty && those.isEmpty
- case _ =>
- super.sameElements(that)
- }
-
- override /*SeqLike*/
- def lengthCompare(len: Int): Int = {
- var i = 0
- var these = self
- while (!these.isEmpty && i <= len) {
- i += 1
- these = these.tail
- }
- i - len
- }
-
- override /*SeqLike*/
- def isDefinedAt(x: Int): Boolean = x >= 0 && lengthCompare(x) > 0
-
- override /*SeqLike*/
- def segmentLength(p: A => Boolean, from: Int): Int = {
- var i = 0
- var these = this drop from
- while (!these.isEmpty && p(these.head)) {
- i += 1
- these = these.tail
- }
- i
- }
-
- override /*SeqLike*/
- def indexWhere(p: A => Boolean, from: Int): Int = {
- var i = from
- var these = this drop from
- while (!these.isEmpty && !p(these.head)) {
- i += 1
- these = these.tail
- }
- if (these.isEmpty) -1 else i
- }
-
- override /*SeqLike*/
- def lastIndexWhere(p: A => Boolean, end: Int): Int = {
- var i = 0
- var these = this
- var last = -1
- while (!these.isEmpty && i <= end) {
- if (p(these.head)) last = i
- these = these.tail
- i += 1
- }
- last
- }
}
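All of the methods deleted above follow one idiom: walk the sequence with a local `these` cursor through `head`/`tail`/`isEmpty`, so that a method such as `lengthCompare` can stop after visiting at most `len + 1` cells instead of measuring the whole sequence. A minimal standalone sketch of that idiom against plain `List` (the object and method names below are illustrative only, not part of the library):

// Illustrative sketch of the cursor-walking loop used by lengthCompare above.
object LengthCompareSketch {
  def lengthCompare[A](xs: List[A], len: Int): Int = {
    var i = 0
    var these = xs
    // Visits at most len + 1 cells, so it stays cheap even on very long lists.
    while (!these.isEmpty && i <= len) {
      i += 1
      these = these.tail
    }
    i - len // negative: shorter than len, zero: equal, positive: longer
  }

  def main(args: Array[String]): Unit = {
    println(lengthCompare(List.range(0, 1000000), 3)) // 1, after visiting only four cells
    println(lengthCompare(List(1, 2), 2))             // 0
    println(lengthCompare(Nil, 2))                    // -2
  }
}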
diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala
new file mode 100755
index 0000000000..7d3c58ad85
--- /dev/null
+++ b/src/library/scala/collection/LinearSeqOptimized.scala
@@ -0,0 +1,301 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+// $Id: LinearSeqOptimized.scala 20608 2010-01-20 00:28:09Z extempore $
+
+
+package scala.collection
+import generic._
+
+import mutable.ListBuffer
+import immutable.List
+import scala.util.control.Breaks._
+
+/** A template trait for linear sequences of type `LinearSeq[A]` which optimizes
+ * the implementation of several methods under the assumption of fast linear access.
+ *
+ * $linearSeqInfo
+ * @author Martin Odersky
+ * @version 2.8
+ * @since 2.8
+ *
+ * @tparam A the element type of the $coll
+ * @tparam Repr the type of the actual $coll containing the elements.
+ */
+trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends LinearSeqLike[A, Repr] { self: Repr =>
+
+ def isEmpty: Boolean
+
+ def head: A
+
+ def tail: Repr
+
+ /** The length of the $coll.
+ *
+ * $willNotTerminateInf
+ *
+ * Note: the execution of `length` may take time proportional to the length of the sequence.
+ */
+ def length: Int = {
+ var these = self
+ var len = 0
+ while (!these.isEmpty) {
+ len += 1
+ these = these.tail
+ }
+ len
+ }
+
+ /** Selects an element by its index in the $coll.
+ * Note: the execution of `apply` may take time proportional to the index value.
+ * @throws `IndexOutOfBoundsException` if `idx` does not satisfy `0 <= idx < length`.
+ */
+ def apply(n: Int): A = {
+ val rest = drop(n)
+ if (n < 0 || rest.isEmpty) throw new IndexOutOfBoundsException
+ rest.head
+ }
+
+ override /*IterableLike*/
+ def foreach[B](f: A => B) {
+ var these = this
+ while (!these.isEmpty) {
+ f(these.head)
+ these = these.tail
+ }
+ }
+
+
+ override /*IterableLike*/
+ def forall(p: A => Boolean): Boolean = {
+ var these = this
+ while (!these.isEmpty) {
+ if (!p(these.head)) return false
+ these = these.tail
+ }
+ true
+ }
+
+ override /*IterableLike*/
+ def exists(p: A => Boolean): Boolean = {
+ var these = this
+ while (!these.isEmpty) {
+ if (p(these.head)) return true
+ these = these.tail
+ }
+ false
+ }
+
+ override /*TraversableLike*/
+ def count(p: A => Boolean): Int = {
+ var these = this
+ var cnt = 0
+ while (!these.isEmpty) {
+ if (p(these.head)) cnt += 1
+ these = these.tail
+ }
+ cnt
+ }
+
+ override /*IterableLike*/
+ def find(p: A => Boolean): Option[A] = {
+ var these = this
+ while (!these.isEmpty) {
+ if (p(these.head)) return Some(these.head)
+ these = these.tail
+ }
+ None
+ }
+/*
+ override def mapFind[B](f: A => Option[B]): Option[B] = {
+ var res: Option[B] = None
+ var these = this
+ while (res.isEmpty && !these.isEmpty) {
+ res = f(these.head)
+ these = these.tail
+ }
+ res
+ }
+*/
+ override /*TraversableLike*/
+ def foldLeft[B](z: B)(f: (B, A) => B): B = {
+ var acc = z
+ var these = this
+ while (!these.isEmpty) {
+ acc = f(acc, these.head)
+ these = these.tail
+ }
+ acc
+ }
+
+ override /*IterableLike*/
+ def foldRight[B](z: B)(f: (A, B) => B): B =
+ if (this.isEmpty) z
+ else f(head, tail.foldRight(z)(f))
+
+ override /*TraversableLike*/
+ def reduceLeft[B >: A](f: (B, A) => B): B =
+ if (isEmpty) throw new UnsupportedOperationException("empty.reduceLeft")
+ else tail.foldLeft[B](head)(f)
+
+ override /*IterableLike*/
+ def reduceRight[B >: A](op: (A, B) => B): B =
+ if (isEmpty) throw new UnsupportedOperationException("Nil.reduceRight")
+ else if (tail.isEmpty) head
+ else op(head, tail.reduceRight(op))
+
+ override /*TraversableLike*/
+ def last: A = {
+ if (isEmpty) throw new NoSuchElementException
+ var these = this
+ var nx = these.tail
+ while (!nx.isEmpty) {
+ these = nx
+ nx = nx.tail
+ }
+ these.head
+ }
+
+ override /*IterableLike*/
+ def take(n: Int): Repr = {
+ val b = newBuilder
+ var i = 0
+ var these = repr
+ while (!these.isEmpty && i < n) {
+ i += 1
+ b += these.head
+ these = these.tail
+ }
+ b.result
+ }
+
+ override /*TraversableLike*/
+ def drop(n: Int): Repr = {
+ var these: Repr = repr
+ var count = n
+ while (!these.isEmpty && count > 0) {
+ these = these.tail
+ count -= 1
+ }
+ these
+ }
+
+ override /*IterableLike*/
+ def dropRight(n: Int): Repr = {
+ val b = newBuilder
+ var these = this
+ var lead = this drop n
+ while (!lead.isEmpty) {
+ b += these.head
+ these = these.tail
+ lead = lead.tail
+ }
+ b.result
+ }
+
+ override /*IterableLike*/
+ def slice(from: Int, until: Int): Repr = {
+ val b = newBuilder
+ var i = from
+ var these = this drop from
+ while (i < until && !these.isEmpty) {
+ b += these.head
+ these = these.tail
+ i += 1
+ }
+ b.result
+ }
+
+ override /*IterableLike*/
+ def takeWhile(p: A => Boolean): Repr = {
+ val b = newBuilder
+ var these = this
+ while (!these.isEmpty && p(these.head)) {
+ b += these.head
+ these = these.tail
+ }
+ b.result
+ }
+
+ override /*TraversableLike*/
+ def span(p: A => Boolean): (Repr, Repr) = {
+ var these: Repr = repr
+ val b = newBuilder
+ while (!these.isEmpty && p(these.head)) {
+ b += these.head
+ these = these.tail
+ }
+ (b.result, these)
+ }
+
+ override /*IterableLike*/
+ def sameElements[B >: A](that: Iterable[B]): Boolean = that match {
+ case that1: LinearSeq[_] =>
+ var these = this
+ var those = that1
+ while (!these.isEmpty && !those.isEmpty && these.head == those.head) {
+ these = these.tail
+ those = those.tail
+ }
+ these.isEmpty && those.isEmpty
+ case _ =>
+ super.sameElements(that)
+ }
+
+ override /*SeqLike*/
+ def lengthCompare(len: Int): Int = {
+ var i = 0
+ var these = self
+ while (!these.isEmpty && i <= len) {
+ i += 1
+ these = these.tail
+ }
+ i - len
+ }
+
+ override /*SeqLike*/
+ def isDefinedAt(x: Int): Boolean = x >= 0 && lengthCompare(x) > 0
+
+ override /*SeqLike*/
+ def segmentLength(p: A => Boolean, from: Int): Int = {
+ var i = 0
+ var these = this drop from
+ while (!these.isEmpty && p(these.head)) {
+ i += 1
+ these = these.tail
+ }
+ i
+ }
+
+ override /*SeqLike*/
+ def indexWhere(p: A => Boolean, from: Int): Int = {
+ var i = from
+ var these = this drop from
+ while (these.nonEmpty) {
+ if (p(these.head))
+ return i
+
+ i += 1
+ these = these.tail
+ }
+ -1
+ }
+
+ override /*SeqLike*/
+ def lastIndexWhere(p: A => Boolean, end: Int): Int = {
+ var i = 0
+ var these = this
+ var last = -1
+ while (!these.isEmpty && i <= end) {
+ if (p(these.head)) last = i
+ these = these.tail
+ i += 1
+ }
+ last
+ }
+}
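For reference, the observable behaviour implied by the definitions above, shown on `List` (assuming, as in 2.8, that `List` picks up these implementations):

// Behavioural sketch of LinearSeqOptimized: apply goes through drop, and
// indexWhere now returns as soon as the predicate matches.
object LinearSeqOptimizedDemo {
  def main(args: Array[String]): Unit = {
    val xs = List(10, 20, 30, 40)

    println(xs(2))                       // 30: drop(2).head, an O(n) walk
    println(xs.indexWhere(_ > 25, 0))    // 2: stops at the first match
    println(xs.indexWhere(_ > 99, 0))    // -1: nothing matches
    println(xs.segmentLength(_ < 35, 1)) // 2: matching prefix starting at index 1
  }
}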
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 55cea1a678..5e1af7a2d7 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -12,6 +12,7 @@ package scala.collection
import generic._
import mutable.{Builder, StringBuilder, MapBuilder}
+import annotation.migration
import PartialFunction._
/** A template trait for maps of type `Map[A, B]` which associate keys of type `A`
@@ -71,7 +72,7 @@ self =>
/** Optionally returns the value associated with a key.
*
- * @key the key value
+ * @param key the key value
* @return an option value containing the value associated with `key` in this map,
* or `None` if none exists.
*/
@@ -109,7 +110,7 @@ self =>
* @param default a computation that yields a default value in case no binding for `key` is
* found in the map.
* @tparam B1 the result type of the default computation.
- * @return the value assocuated with `key` if it exists,
+ * @return the value associated with `key` if it exists,
* otherwise the result of the `default` computation.
* @usecase def getOrElse(key: A, default: => B): B
*/
@@ -181,15 +182,16 @@ self =>
*
* @return an iterator over all keys.
*/
- @deprecated("use `keysIterator' instead")
- def keys: Iterator[A] = keysIterator
+ @migration(2, 8, "As of 2.8, keys returns Iterable[A] rather than Iterator[A].")
+ def keys: Iterable[A] = keySet
/** Collects all values of this map in an iterable collection.
* @return the values of this map as an iterable.
*/
- def valuesIterable: Iterable[B] = new DefaultValuesIterable
+ @migration(2, 8, "As of 2.8, values returns Iterable[B] rather than Iterator[B].")
+ def values: Iterable[B] = new DefaultValuesIterable
- /** The implementation class of the iterable returned by `valuesIterable`.
+ /** The implementation class of the iterable returned by `values`.
*/
protected class DefaultValuesIterable extends Iterable[B] {
def iterator = valuesIterator
@@ -207,13 +209,6 @@ self =>
def next = iter.next._2
}
- /** Creates an iterator for all contained values.
- *
- * @return an iterator over all values.
- */
- @deprecated("use `valuesIterator' instead")
- def values: Iterator[B] = valuesIterator
-
/** Defines the default value computation for the map,
* returned when a key is not found
* The method implemented here throws an exception,
@@ -230,7 +225,7 @@ self =>
* @return an immutable map consisting only of those key value pairs of this map where the key satisfies
* the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
- def filterKeys(p: A => Boolean) = new DefaultMap[A, B] {
+ def filterKeys(p: A => Boolean): Map[A, B] = new DefaultMap[A, B] {
override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
def iterator = self.iterator.filter(kv => p(kv._1))
override def contains(key: A) = self.contains(key) && p(key)
@@ -245,7 +240,7 @@ self =>
/** A map view resulting from applying a given function `f` to each value
* associated with a key in this map.
*/
- def mapValues[C](f: B => C) = new DefaultMap[A, C] {
+ def mapValues[C](f: B => C): Map[A, C] = new DefaultMap[A, C] {
override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v)))
def iterator = for ((k, v) <- self.iterator) yield (k, f(v))
override def size = self.size
@@ -291,18 +286,25 @@ self =>
* @return a new map with the given bindings added to this map
* @usecase def + (kvs: Traversable[(A, B)]): Map[A, B]
*/
- def ++[B1 >: B](kvs: Traversable[(A, B1)]): Map[A, B1] =
- ((repr: Map[A, B1]) /: kvs) (_ + _)
+ def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1] =
+ ((repr: Map[A, B1]) /: xs) (_ + _)
- /** Adds all key/value pairs produced by an iterator to this map, returning a new map.
+ /** Returns a new map with all key/value pairs for which the predicate
+ * <code>p</code> returns <code>false</code>.
*
- * @param iter the iterator producing key/value pairs
- * @tparam B1 the type of the added values
- * @return a new map with the given bindings added to this map
- * @usecase def + (iter: Iterator[(A, B)]): Map[A, B]
+ * @param p A predicate over key-value pairs
+ * @note This method works by successively removing elements for which the
+ * predicate is true from this map.
+ * If removal is slow, or you expect that most elements of the map
+ * will be removed, you might consider using <code>filter</code>
+ * with a negated predicate instead.
*/
- def ++[B1 >: B] (iter: Iterator[(A, B1)]): Map[A, B1] =
- ((repr: Map[A, B1]) /: iter) (_ + _)
+ override def filterNot(p: ((A, B)) => Boolean): This = {
+ var res: This = repr
+ for (kv <- this)
+ if (p(kv)) res = (res - kv._1).asInstanceOf[This] // !!! concrete overrides abstract problem
+ res
+ }
/** Appends all bindings of this map to a string builder using start, end, and separator strings.
* The written text begins with the string `start` and ends with the string
@@ -320,7 +322,7 @@ self =>
/** Defines the prefix of this object's `toString` representation.
* @return a string representation which starts the result of `toString` applied to this $coll.
- * Unless overridden in subclasse, the string prefix of every map is `"Map"`.
+ * Unless overridden in subclasses, the string prefix of every map is `"Map"`.
*/
override def stringPrefix: String = "Map"
@@ -351,7 +353,7 @@ self =>
}
} catch {
case ex: ClassCastException =>
- println("calss cast "); false
+ println("class cast "); false
}}
case _ =>
false
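A short usage sketch of the reworked map surface above, on a plain immutable `Map`; the results follow from the definitions in this file:

// Sketch of the 2.8 MapLike changes: keys/values as Iterables, view-like
// filterKeys/mapValues, ++ over any TraversableOnce, and filterNot on pairs.
object MapLikeDemo {
  def main(args: Array[String]): Unit = {
    val m = Map("a" -> 1, "b" -> 2, "c" -> 3)

    val ks: Iterable[String] = m.keys    // no longer an Iterator
    val vs: Iterable[Int]    = m.values  // can be traversed repeatedly
    println(ks.size + vs.size)           // 6

    println(m.filterKeys(_ != "b").size) // 2, wraps m without copying
    println(m.mapValues(_ * 10).get("c")) // Some(30)

    println((m ++ Iterator("d" -> 4)).size)       // 4, iterators now accepted directly
    println(m.filterNot { case (_, v) => v > 1 }) // Map(a -> 1), keeps pairs where p is false
  }
}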
diff --git a/src/library/scala/collection/MapProxyLike.scala b/src/library/scala/collection/MapProxyLike.scala
index 427eaa6e2c..f269a368dd 100644
--- a/src/library/scala/collection/MapProxyLike.scala
+++ b/src/library/scala/collection/MapProxyLike.scala
@@ -36,10 +36,9 @@ trait MapProxyLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
override def isDefinedAt(key: A) = self.isDefinedAt(key)
override def keySet: Set[A] = self.keySet
override def keysIterator: Iterator[A] = self.keysIterator
- override def keys: Iterator[A] = self.keysIterator
- override def valuesIterable: Iterable[B] = self.valuesIterable
+ override def keys: Iterable[A] = self.keys
+ override def values: Iterable[B] = self.values
override def valuesIterator: Iterator[B] = self.valuesIterator
- override def values: Iterator[B] = self.valuesIterator
override def default(key: A): B = self.default(key)
override def filterKeys(p: A => Boolean) = self.filterKeys(p)
override def mapValues[C](f: B => C) = self.mapValues(f)
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index 32aae28851..0db64926a6 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -11,7 +11,7 @@
package scala.collection
-import mutable.{ListBuffer, HashMap, GenericArray}
+import mutable.{ListBuffer, HashMap, ArraySeq}
import immutable.{List, Range}
import generic._
@@ -169,7 +169,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
*
* @param idx The index to select.
* @return the element of this $coll at index `idx`, where `0` indicates the first element.
- * @throws `IndexOutOfBoundsEsxception` if `idx` does not satisfy `0 <= idx < length`.
+ * @throws `IndexOutOfBoundsException` if `idx` does not satisfy `0 <= idx < length`.
*/
def apply(idx: Int): A
@@ -212,7 +212,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
*/
def isDefinedAt(idx: Int): Boolean = (idx >= 0) && (idx < length)
- /** Computes length of longest segment whose elements all satisfy some preficate.
+ /** Computes length of longest segment whose elements all satisfy some predicate.
*
* $mayNotTerminateInf
*
@@ -229,7 +229,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
i
}
- /** Returns the length of the longest prefix whose elements all satisfy some preficate.
+ /** Returns the length of the longest prefix whose elements all satisfy some predicate.
*
* $mayNotTerminateInf
*
@@ -261,12 +261,15 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
def indexWhere(p: A => Boolean, from: Int): Int = {
var i = from
var it = iterator.drop(from)
- while (it.hasNext && !p(it.next()))
- i += 1
- if (it.hasNext) i else -1
+ while (it.hasNext) {
+ if (p(it.next())) return i
+ else i += 1
+ }
+
+ -1
}
- /** Returns index of the first element satisying a predicate, or `-1`.
+ /** Returns index of the first element satisfying a predicate, or `-1`.
*/
def findIndexOf(p: A => Boolean): Int = indexWhere(p)
@@ -491,7 +494,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
/** Finds last index before or at a given end index where this $coll contains a given sequence as a slice.
* @param that the sequence to test
- * @param end the end idnex
+ * @param end the end index
* @return the last index `<= end` such that the elements of this $coll starting at this index
* match the elements of sequence `that`, or `-1` of no such subsequence exists.
*/
@@ -557,7 +560,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
* part of the result, but any following occurrences will.
*/
- def diff[B >: A, That](that: Seq[B]): Repr = {
+ def diff[B >: A](that: Seq[B]): Repr = {
val occ = occCounts(that)
val b = newBuilder
for (x <- this)
@@ -585,7 +588,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
* in the result, but any following occurrences will be omitted.
*/
- def intersect[B >: A, That](that: Seq[B]): Repr = {
+ def intersect[B >: A](that: Seq[B]): Repr = {
val occ = occCounts(that)
val b = newBuilder
for (x <- this)
@@ -607,7 +610,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
*
* @return A new $coll which contains the first occurrence of every element of this $coll.
*/
- def removeDuplicates: Repr = {
+ def distinct: Repr = {
val b = newBuilder
var seen = Set[A]() //TR: should use mutable.HashSet?
for (x <- this) {
@@ -627,7 +630,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
* @tparam B the element type of the returned $coll.
* @tparam That $thatinfo
* @param bf $bfinfo
- * @return a new collection of type `That` consisting of all elements of this $coll
+ * @return a new $coll consisting of all elements of this $coll
* except that `replaced` elements starting from `from` are replaced
* by `patch`.
* @usecase def patch(from: Int, that: Seq[A], replaced: Int): $Coll[A]
@@ -650,7 +653,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
* @tparam B the element type of the returned $coll.
* @tparam That $thatinfo
* @param bf $bfinfo
- * @return a new collection of type `That` which is a copy of this $coll with the element at position `index` replaced by `elem`.
+ * @return a new $coll which is a copy of this $coll with the element at position `index` replaced by `elem`.
* @usecase def updated(index: Int, elem: A): $Coll[A]
* @return a copy of this $coll with the element at position `index` replaced by `elem`.
*/
@@ -721,7 +724,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
b ++= thisCollection
while (diff > 0) {
b += elem
- diff -=1
+ diff -= 1
}
b.result
}
@@ -757,12 +760,34 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
* the desired ordering.
* @return a $coll consisting of the elements of this $coll
* sorted according to the comparison function `lt`.
- * @ex {{{
+ * @example {{{
* List("Steve", "Tom", "John", "Bob").sortWith(_.compareTo(_) < 0) =
* List("Bob", "John", "Steve", "Tom")
* }}}
*/
- def sortWith(lt: (A, A) => Boolean): Repr = sortWith(Ordering fromLessThan lt)
+ def sortWith(lt: (A, A) => Boolean): Repr = sorted(Ordering fromLessThan lt)
+
+ /** Sorts this $Coll according to the Ordering which results from transforming
+ * an implicitly given Ordering with a transformation function.
+ * @see scala.math.Ordering
+ * $willNotTerminateInf
+ * @param f the transformation function mapping elements
+ * to some other domain `B`.
+ * @param ord the ordering assumed on domain `B`.
+ * @tparam B the target type of the transformation `f`, and the type where
+ * the ordering `ord` is defined.
+ * @return a $coll consisting of the elements of this $coll
+ * sorted according to the ordering where `x < y` if
+ * `ord.lt(f(x), f(y))`.
+ *
+ * @example {{{
+ * val words = "The quick brown fox jumped over the lazy dog".split(' ')
+ * // this works because scala.Ordering will implicitly provide an Ordering[Tuple2[Int, Char]]
+ * words.sortBy(x => (x.length, x.head))
+ * res0: Array[String] = Array(The, dog, fox, the, lazy, over, brown, quick, jumped)
+ * }}}
+ */
+ def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Repr = sorted(ord on f)
/** Sorts this $coll according to an Ordering.
*
@@ -775,42 +800,19 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
* @return a $coll consisting of the elements of this $coll
* sorted according to the ordering `ord`.
*/
- def sortWith[B >: A](ord: Ordering[B]): Repr = {
- val arr = new GenericArray[A](this.length)
+ def sorted[B >: A](implicit ord: Ordering[B]): Repr = {
+ val arr = new ArraySeq[A](this.length)
var i = 0
for (x <- this) {
arr(i) = x
i += 1
}
- java.util.Arrays.sort(
- arr.array, ord.asInstanceOf[Ordering[Object]])
+ java.util.Arrays.sort(arr.array, ord.asInstanceOf[Ordering[Object]])
val b = newBuilder
for (x <- arr) b += x
b.result
}
- /** Sorts this $Coll according to the Ordering which results from transforming
- * an implicitly given Ordering with a transformation function.
- * @see scala.math.Ordering
- * $willNotTerminateInf
- * @param f the transformation function mapping elements
- * to some other domain `B`.
- * @param ord the ordering assumed on domain `B`.
- * @tparam B the target type of the transformation `f`, and the type where
- * the ordering `ord` is defined.
- * @return a $coll consisting of the elements of this $coll
- * sorted according to the ordering where `x < y` if
- * `ord.lt(f(x), f(y))`.
- *
- * @ex {{{
- * val words = "The quick brown fox jumped over the lazy dog".split(' ')
- * // this works because scala.Ordering will implicitly provide an Ordering[Tuple2[Int, Char]]
- * words.sortBy(x => (x.length, x.head))
- * res0: Array[String] = Array(The, dog, fox, the, lazy, over, brown, quick, jumped)
- * }}}
- */
- def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Repr = sortWith(ord on f)
-
/** Converts this $coll to a sequence.
* $willNotTerminateInf
*
@@ -820,7 +822,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
/** Produces the range of all indices of this sequence.
*
- * @range a `Range` value from `0` to one less than the length of this $coll.
+ * @return a `Range` value from `0` to one less than the length of this $coll.
*/
def indices: Range = 0 until length
@@ -839,16 +841,17 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
override def hashCode() = (Seq.hashSeed /: this)(_ * 41 + _.hashCode)
override def equals(that: Any): Boolean = that match {
- case that: Seq[_] => (that canEqual this) && (this sameElements that)
- case _ => false
+ case that: Seq[_] => (that canEqual this) && (this sameElements that)
+ case _ => false
}
/* Need to override string, so that it's not the Function1's string that gets mixed in.
*/
override def toString = super[IterableLike].toString
- /** Returns index of the last element satisying a predicate, or -1.
+ /** Returns index of the last element satisfying a predicate, or -1.
*/
+ @deprecated("use `lastIndexWhere` instead")
def findLastIndexOf(p: A => Boolean): Int = lastIndexWhere(p)
/** Tests whether every element of this $coll relates to the
@@ -862,15 +865,7 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] { self =>
* and `y` of `that`, otherwise `false`.
*/
@deprecated("use `corresponds` instead")
- def equalsWith[B](that: Seq[B])(f: (A,B) => Boolean): Boolean = {
- val i = this.iterator
- val j = that.iterator
- while (i.hasNext && j.hasNext)
- if (!f(i.next, j.next))
- return false
-
- !i.hasNext && !j.hasNext
- }
+ def equalsWith[B](that: Seq[B])(f: (A,B) => Boolean): Boolean = corresponds(that)(f)
/**
* returns a projection that can be used to call non-strict <code>filter</code>,
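A usage sketch of the renamed operations above: `sorted`, `sortBy`, the predicate form of `sortWith`, plus `distinct`, `diff` and `intersect` with their simplified signatures. The results follow from the definitions in this file:

// Sketch of the 2.8 SeqLike renames and signature changes.
object SeqLikeDemo {
  def main(args: Array[String]): Unit = {
    val xs = List(3, 1, 4, 1, 5, 9, 2, 6)

    println(xs.sorted)                   // List(1, 1, 2, 3, 4, 5, 6, 9)
    println(xs.sortWith(_ > _))          // List(9, 6, 5, 4, 3, 2, 1, 1)

    val words = List("The", "quick", "brown", "fox")
    println(words.sortBy(_.length))      // List(The, fox, quick, brown), sort is stable

    println(xs.distinct)                 // List(3, 1, 4, 5, 9, 2, 6), was removeDuplicates
    println(xs.diff(List(1, 9)))         // List(3, 4, 1, 5, 2, 6), multiset difference
    println(xs.intersect(List(1, 1, 2))) // List(1, 1, 2), multiset intersection
  }
}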
diff --git a/src/library/scala/collection/SeqProxyLike.scala b/src/library/scala/collection/SeqProxyLike.scala
index 3dfac63dde..24ee0b430a 100644
--- a/src/library/scala/collection/SeqProxyLike.scala
+++ b/src/library/scala/collection/SeqProxyLike.scala
@@ -23,11 +23,10 @@ import generic._
* @version 2.8
* @since 2.8
*/
-trait SeqProxyLike[+A, +This <: SeqLike[A, This] with Seq[A]] extends SeqLike[A, This] with IterableProxyLike[A, This] {
+trait SeqProxyLike[+A, +Repr <: SeqLike[A, Repr] with Seq[A]] extends SeqLike[A, Repr] with IterableProxyLike[A, Repr] {
override def length: Int = self.length
override def apply(idx: Int): A = self.apply(idx)
override def lengthCompare(len: Int): Int = self.lengthCompare(len)
- override def size = self.size
override def isDefinedAt(x: Int): Boolean = self.isDefinedAt(x)
override def segmentLength(p: A => Boolean, from: Int): Int = self.segmentLength(p, from)
override def prefixLength(p: A => Boolean) = self.prefixLength(p)
@@ -40,22 +39,34 @@ trait SeqProxyLike[+A, +This <: SeqLike[A, This] with Seq[A]] extends SeqLike[A,
override def lastIndexOf[B >: A](elem: B, end: Int): Int = self.lastIndexWhere(elem ==, end)
override def lastIndexWhere(p: A => Boolean): Int = self.lastIndexWhere(p, length - 1)
override def lastIndexWhere(p: A => Boolean, end: Int): Int = self.lastIndexWhere(p)
- override def reverse: This = self.reverse
+ override def reverse: Repr = self.reverse
+ override def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.reverseMap(f)(bf)
override def reverseIterator: Iterator[A] = self.reverseIterator
override def startsWith[B](that: Seq[B], offset: Int): Boolean = self.startsWith(that, offset)
override def startsWith[B](that: Seq[B]): Boolean = self.startsWith(that)
override def endsWith[B](that: Seq[B]): Boolean = self.endsWith(that)
override def indexOfSlice[B >: A](that: Seq[B]): Int = self.indexOfSlice(that)
+ override def indexOfSlice[B >: A](that: Seq[B], from: Int): Int = self.indexOfSlice(that, from)
+ override def lastIndexOfSlice[B >: A](that: Seq[B]): Int = self.lastIndexOfSlice(that)
+ override def lastIndexOfSlice[B >: A](that: Seq[B], end: Int): Int = self.lastIndexOfSlice(that, end)
+ override def containsSlice[B](that: Seq[B]): Boolean = self.indexOfSlice(that) != -1
override def contains(elem: Any): Boolean = self.contains(elem)
- override def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[This, B, That]): That = self.union(that)(bf)
- override def diff[B >: A, That](that: Seq[B]): This = self.diff(that)
- override def intersect[B >: A, That](that: Seq[B]): This = self.intersect(that)
- override def removeDuplicates: This = self.removeDuplicates
- override def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That = self.patch(from, patch, replaced)(bf)
- override def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That = self.padTo(len, elem)(bf)
+ override def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.union(that)(bf)
+ override def diff[B >: A](that: Seq[B]): Repr = self.diff(that)
+ override def intersect[B >: A](that: Seq[B]): Repr = self.intersect(that)
+ override def distinct: Repr = self.distinct
+ override def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.patch(from, patch, replaced)(bf)
+ override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.updated(index, elem)(bf)
+ override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.+:(elem)(bf)
+ override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.:+(elem)(bf)
+ override def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.padTo(len, elem)(bf)
+ override def corresponds[B](that: Seq[B])(p: (A,B) => Boolean): Boolean = self.corresponds(that)(p)
+ override def sortWith(lt: (A, A) => Boolean): Repr = self.sortWith(lt)
+ override def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Repr = self.sortBy(f)(ord)
+ override def sorted[B >: A](implicit ord: Ordering[B]): Repr = self.sorted(ord)
override def indices: Range = self.indices
override def view = self.view
override def view(from: Int, until: Int) = self.view(from, until)
- override def equalsWith[B](that: Seq[B])(f: (A,B) => Boolean): Boolean = (self zip that) forall { case (x,y) => f(x,y) }
- override def containsSlice[B](that: Seq[B]): Boolean = self.indexOfSlice(that) != -1
}
+
+
diff --git a/src/library/scala/collection/SeqView.scala b/src/library/scala/collection/SeqView.scala
index 61443b3b90..79d50f1de6 100644
--- a/src/library/scala/collection/SeqView.scala
+++ b/src/library/scala/collection/SeqView.scala
@@ -21,6 +21,8 @@ import TraversableView.NoBuilder
*/
trait SeqView[+A, +Coll] extends SeqViewLike[A, Coll, SeqView[A, Coll]]
+/** $factoryInfo
+ */
object SeqView {
type Coll = TraversableView[_, C] forSome {type C <: Traversable[_]}
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, SeqView[A, Seq[_]]] =
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index 1a8cd20013..7014833a46 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -21,8 +21,8 @@ import TraversableView.NoBuilder
* @version 2.8
*/
trait SeqViewLike[+A,
- +Coll,
- +This <: SeqView[A, Coll] with SeqViewLike[A, Coll, This]]
+ +Coll,
+ +This <: SeqView[A, Coll] with SeqViewLike[A, Coll, This]]
extends Seq[A] with SeqLike[A, This] with IterableView[A, Coll] with IterableViewLike[A, Coll, This]
{ self =>
@@ -31,6 +31,11 @@ trait SeqViewLike[+A,
override def apply(idx: Int): B
}
+ trait Forced[B] extends Transformed[B] with super.Forced[B] {
+ override def length = forced.length
+ override def apply(idx: Int) = forced.apply(idx)
+ }
+
trait Sliced extends Transformed[A] with super.Sliced {
override def length = ((until min self.length) - from) max 0
override def apply(idx: Int): A =
@@ -104,7 +109,8 @@ trait SeqViewLike[+A,
trait Zipped[B] extends Transformed[(A, B)] with super.Zipped[B] {
protected[this] lazy val thatSeq = other.toSeq
- override def length: Int = self.length min thatSeq.length
+ /* Have to be careful here - other may be an infinite sequence. */
+ override def length = if ((thatSeq lengthCompare self.length) <= 0) thatSeq.length else self.length
override def apply(idx: Int) = (self.apply(idx), thatSeq.apply(idx))
}
@@ -143,9 +149,20 @@ trait SeqViewLike[+A,
override def stringPrefix = self.stringPrefix+"P"
}
+ trait Prepended[B >: A] extends Transformed[B] {
+ protected[this] val fst: B
+ override def iterator: Iterator[B] = Iterator.single(fst) ++ self.iterator
+ override def length: Int = 1 + self.length
+ override def apply(idx: Int): B =
+ if (idx == 0) fst
+ else self.apply(idx - 1)
+ override def stringPrefix = self.stringPrefix+"A"
+ }
+
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
+ protected override def newForced[B](xs: => Seq[B]): Transformed[B] = new Forced[B] { val forced = xs }
protected override def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new Appended[B] { val rest = that }
protected override def newMapped[B](f: A => B): Transformed[B] = new Mapped[B] { val mapping = f }
protected override def newFlatMapped[B](f: A => Traversable[B]): Transformed[B] = new FlatMapped[B] { val mapping = f }
@@ -157,6 +174,7 @@ trait SeqViewLike[+A,
protected override def newZippedAll[A1 >: A, B](that: Iterable[B], _thisElem: A1, _thatElem: B): Transformed[(A1, B)] = new ZippedAll[A1, B] { val other = that; val thisElem = _thisElem; val thatElem = _thatElem }
protected def newReversed: Transformed[A] = new Reversed { }
protected def newPatched[B >: A](_from: Int, _patch: Seq[B], _replaced: Int): Transformed[B] = new Patched[B] { val from = _from; val patch = _patch; val replaced = _replaced }
+ protected def newPrepended[B >: A](elem: B): Transformed[B] = new Prepended[B] { protected[this] val fst = elem }
override def reverse: This = newReversed.asInstanceOf[This]
@@ -167,14 +185,35 @@ trait SeqViewLike[+A,
// else super.patch[B, That](from, patch, replaced)(bf)
}
- //TR TODO: updated, +: ed :+ ed
-
override def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
patch(length, fill(len - length)(elem), 0)
override def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That =
reverse.map(f)
+ override def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That = {
+ require(0 <= index && index < length)
+ patch(index, List(elem), 1)(bf)
+ }
+
+ override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
+ newPrepended(elem).asInstanceOf[That]
+
+ override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[This, B, That]): That =
+ ++(Iterator.single(elem))(bf)
+
+ override def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[This, B, That]): That =
+ newForced(thisSeq union that).asInstanceOf[That]
+
+ override def diff[B >: A](that: Seq[B]): This =
+ newForced(thisSeq diff that).asInstanceOf[This]
+
+ override def intersect[B >: A](that: Seq[B]): This =
+ newForced(thisSeq intersect that).asInstanceOf[This]
+
+ override def sorted[B >: A](implicit ord: Ordering[B]): This =
+ newForced(thisSeq sorted ord).asInstanceOf[This]
+
override def stringPrefix = "SeqView"
}
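A short sketch of the sequence-view operations wired up above through `newForced`: the receiver is evaluated to a sequence and the result is wrapped back into a view. The calls below assume the standard 2.8 view plumbing; results follow from the definitions:

// Sketch of SeqView operations implemented via newForced above.
object SeqViewDemo {
  def main(args: Array[String]): Unit = {
    val v = List(3, 1, 2, 2).view

    println(v.sorted.toList)                // List(1, 2, 2, 3)
    println(v.diff(List(2)).toList)         // List(3, 1, 2)
    println(v.intersect(List(2, 3)).toList) // List(3, 2)
    println(v.reverse.toList)               // List(2, 2, 1, 3)
  }
}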
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index 156d0d8b2b..48b5358afc 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -187,7 +187,7 @@ self =>
* @note This operation contains an unchecked cast: if `that`
* is a set, it will assume with an unchecked cast
* that it has the same element type as this set.
- * Any subsequuent ClassCastException is treated as a `false` result.
+ * Any subsequent ClassCastException is treated as a `false` result.
*/
override def equals(that: Any): Boolean = that match {
case that: Set[_] =>
diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala
index 24f363243f..7b0d35220e 100644
--- a/src/library/scala/collection/SortedMap.scala
+++ b/src/library/scala/collection/SortedMap.scala
@@ -21,9 +21,9 @@ import mutable.Builder
*/
trait SortedMap[A, +B] extends Map[A, B] with SortedMapLike[A, B, SortedMap[A, B]] {
/** Needs to be overridden in subclasses. */
- override def empty = SortedMap.empty[A, B]
+ override def empty: SortedMap[A, B] = SortedMap.empty[A, B]
- override protected[this] def newBuilder : Builder[(A, B), SortedMap[A, B]] =
+ override protected[this] def newBuilder: Builder[(A, B), SortedMap[A, B]] =
immutable.SortedMap.newBuilder[A, B]
}
diff --git a/src/library/scala/collection/Traversable.scala b/src/library/scala/collection/Traversable.scala
index c3d7fa8bc7..1cecec5227 100644
--- a/src/library/scala/collection/Traversable.scala
+++ b/src/library/scala/collection/Traversable.scala
@@ -29,8 +29,7 @@ trait Traversable[+A] extends TraversableLike[A, Traversable[A]]
override def isEmpty: Boolean
override def size: Int
override def hasDefiniteSize
- override def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
- override def ++[B >: A, That](that: Iterator[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
+ override def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
override def flatMap[B, That](f: A => Traversable[B])(implicit bf: CanBuildFrom[Traversable[A], B, That]): That
override def filter(p: A => Boolean): Traversable[A]
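With the two overloads collapsed into a single `TraversableOnce` parameter above, `++` accepts collections and iterators through the same method:

// One ++ overload now covers both cases.
object PlusPlusDemo {
  def main(args: Array[String]): Unit = {
    val xs = List(1, 2)
    println(xs ++ Seq(3, 4))      // List(1, 2, 3, 4)
    println(xs ++ Iterator(5, 6)) // List(1, 2, 5, 6)
  }
}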
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index fc666ddb92..7008d3b5fd 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -88,8 +88,8 @@ import immutable.{List, Stream, Nil, ::}
*
* Note: will not terminate for infinite-sized collections.
*/
-trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr] {
-self =>
+trait TraversableLike[+A, +Repr] extends HasNewBuilder[A, Repr] with TraversableOnce[A] {
+ self =>
import Traversable.breaks._
@@ -148,23 +148,10 @@ self =>
result
}
- /** Tests whether the $coll is not empty.
- *
- * @return `true` if the $coll contains at least one element, `false` otherwise.
+ /** Tests whether this $coll can be repeatedly traversed.
+ * @return `true`
*/
- def nonEmpty: Boolean = !isEmpty
-
- /** The size of this $coll.
- *
- * $willNotTerminateInf
- *
- * @return the number of elements in this $coll.
- */
- def size: Int = {
- var result = 0
- for (x <- this) result += 1
- result
- }
+ final def isTraversableAgain = true
/** Tests whether this $coll is known to have a finite size.
* All strict collections are known to have finite size. For a non-strict collection
@@ -186,36 +173,15 @@ self =>
* @return a new collection of type `That` which contains all elements of this $coll
* followed by all elements of `that`.
*
- * @usecase def ++(that: Traversable[A]): $Coll[A]
- *
- * @return a new $coll which contains all elements of this $coll
- * followed by all elements of `that`.
- */
- def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
- val b = bf(repr)
- b ++= thisCollection
- b ++= that
- b.result
- }
-
- /** Concatenates this $coll with the elements of an iterator.
- *
- * @param that the iterator to append.
- * @tparam B the element type of the returned collection.
- * @tparam That $thatinfo
- * @param bf $bfinfo
- * @return a new collection of type `That` which contains all elements of this $coll
- * followed by all elements of `that`.
- *
- * @usecase def ++(that: Iterator[A]): $Coll[A]
+ * @usecase def ++(that: TraversableOnce[A]): $Coll[A]
*
* @return a new $coll which contains all elements of this $coll
* followed by all elements of `that`.
*/
- def ++[B >: A, That](that: Iterator[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
b ++= thisCollection
- b ++= that
+ b ++= xs
b.result
}
@@ -292,13 +258,13 @@ self =>
* `pf` to each element on which it is defined and collecting the results.
* The order of the elements is preserved.
*
- * @usecase def partialMap[B](pf: PartialFunction[A, B]): $Coll[B]
+ * @usecase def collect[B](pf: PartialFunction[A, B]): $Coll[B]
*
* @return a new $coll resulting from applying the given partial function
* `pf` to each element on which it is defined and collecting the results.
* The order of the elements is preserved.
*/
- def partialMap[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
for (x <- this) if (pf.isDefinedAt(x)) b += pf(x)
b.result
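The rename from `partialMap` to `collect` above keeps the behaviour: the partial function is applied only where it is defined, and everything else is dropped:

// collect applies the partial function only where isDefinedAt is true.
object CollectDemo {
  def main(args: Array[String]): Unit = {
    val mixed: List[Any] = List(1, "two", 3, "four")
    println(mixed collect { case i: Int => i * 10 }) // List(10, 30)
  }
}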
@@ -409,21 +375,6 @@ self =>
result
}
- /** Counts the number of elements in the $coll which satisfy a predicate.
- *
- * @param p the predicate used to test elements.
- * @return the number of elements satisfying the predicate `p`.
- *
- *
- */
- def count(p: A => Boolean): Int = {
- var cnt = 0
- for (x <- this) {
- if (p(x)) cnt += 1
- }
- cnt
- }
-
/** Finds the first element of the $coll satisfying a predicate, if any.
*
* $mayNotTerminateInf
@@ -464,227 +415,44 @@ self =>
}
*/
- /** Applies a binary operator to a start value and all elements of this $coll, going left to right.
- *
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param z the start value.
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive elements of this $coll$,
- * going left to right with the start value `z` on the left:
- * {{{
- * op(...op(z, x,,1,,), x,,2,,, ..., x,,n,,)
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- */
- def foldLeft[B](z: B)(op: (B, A) => B): B = {
- var result = z
- for (x <- this)
- result = op(result, x)
- result
- }
-
- /** Applies a binary operator to a start value and all elements of this $coll, going left to right.
- *
- * Note: `/:` is alternate syntax for `foldLeft`; `z /: xs` is the same as `xs foldLeft z`.
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param z the start value.
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive elements of this $coll$,
- * going left to right with the start value `z` on the left:
- * {{{
- * op(...op(op(z, x,,1,,), x,,2,,), ..., x,,n,,)
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- */
- def /: [B](z: B)(op: (B, A) => B): B = foldLeft(z)(op)
-
- /** Applies a binary operator to all elements of this $coll and a start value, going right to left.
- *
- * $willNotTerminateInf
- * $orderDependentFold
- * @param z the start value.
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive elements of this $coll$,
- * going right to left with the start value `z` on the right:
- * {{{
- * op(x,,1,,, op(x,,2,,, ... op(x,,n,,, z)...))
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- */
- def foldRight[B](z: B)(op: (A, B) => B): B = {
- var elems: List[A] = Nil
- for (x <- this) elems = x :: elems
- elems.foldLeft(z)((x, y) => op(y, x))
- }
-
- /** Applies a binary operator to all elements of this $coll and a start value, going right to left.
- *
- * Note: `:\` is alternate syntax for `foldRight`; `xs :\ z` is the same as `xs foldRight z`.
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param z the start value
- * @param op the binary operator
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive elements of this $coll$,
- * going right to left with the start value `z` on the right:
- * {{{
- * op(x,,1,,, op(x,,2,,, ... op(x,,n,,, z)...))
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- */
- def :\ [B](z: B)(op: (A, B) => B): B = foldRight(z)(op)
-
- /** Applies a binary operator to all elements of this $coll, going left to right.
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive elements of this $coll$,
- * going left to right:
- * {{{
- * op(...(op(x,,1,,, x,,2,,), ... ) , x,,n,,)
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- * @throws `UnsupportedOperationException` if this $coll is empty.
- */
- def reduceLeft[B >: A](op: (B, A) => B): B = {
- if (isEmpty) throw new UnsupportedOperationException("empty.reduceLeft")
- var result: B = head
- var first = true
- for (x <- this)
- if (first) first = false
- else result = op(result, x)
- result
- }
-
- /** Optionally applies a binary operator to all elements of this $coll, going left to right.
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return an option value containing the result of `reduceLeft(op)` is this $coll is nonempty,
- * `None` otherwise.
- */
- def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = {
- if (isEmpty) None else Some(reduceLeft(op))
- }
-
- /** Applies a binary operator to all elements of this $coll, going right to left.
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return the result of inserting `op` between consecutive elements of this $coll$,
- * going right to left:
- * {{{
- * op(x,,1,,, op(x,,2,,, ..., op(x,,n-1,,, x,,n,,)...))
- * }}}
- * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
- * @throws `UnsupportedOperationException` if this $coll is empty.
- */
- def reduceRight[B >: A](op: (A, B) => B): B = {
- if (isEmpty) throw new UnsupportedOperationException("empty.reduceRight")
- var elems: List[A] = Nil
- for (x <- this) elems = x :: elems
- elems.reduceLeft[B]((x, y) => op(y, x))
- }
-
- /** Optionally applies a binary operator to all elements of this $coll, going right to left.
- * $willNotTerminateInf
- * $orderDependentFold
- *
- * @param op the binary operator.
- * @tparam B the result type of the binary operator.
- * @return an option value containing the result of `reduceRight(op)` is this $coll is nonempty,
- * `None` otherwise.
- */
- def reduceRightOption[B >: A](op: (A, B) => B): Option[B] =
- if (isEmpty) None else Some(reduceRight(op))
-
- /** Sums up the elements of this collection.
- *
- * @param num an implicit parameter defining a set of numeric operations
- * which includes the `+` operator to be used in forming the sum.
- * @tparam B the result type of the `+` operator.
- * @return the sum of all elements of this $coll with respect to the `+` operator in `num`.
- *
- * @usecase def sum: Int
- *
- * @return the sum of all elements in this $coll of numbers of type `Int`.
- * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
- * can be used as element type of the $coll and as result type of `sum`.
- * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
- *
- */
- def sum[B >: A](implicit num: Numeric[B]): B = {
- var acc = num.zero
- for (x <- self) acc = num.plus(acc, x)
- acc
- }
-
- /** Multiplies up the elements of this collection.
- *
- * @param num an implicit parameter defining a set of numeric operations
- * which includes the `*` operator to be used in forming the product.
- * @tparam B the result type of the `*` operator.
- * @return the product of all elements of this $coll with respect to the `*` operator in `num`.
- *
- * @usecase def product: Int
- *
- * @return the product of all elements in this $coll of numbers of type `Int`.
- * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
- * can be used as element type of the $coll and as result type of `product`.
- * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
- */
- def product[B >: A](implicit num: Numeric[B]): B = {
- var acc = num.one
- for (x <- self) acc = num.times(acc, x)
- acc
- }
-
- /** Finds the smallest element.
- *
- * @param cmp An ordering to be used for comparing elements.
- * @tparam B The type over which the ordering is defined.
- * @return the smallest element of this $coll with respect to the ordering `cmp`.
+ /**
+ * Produces a collection containing cumulative results of applying the operator going left to right.
+ * $willNotTerminateInf
+ * $orderDependent
*
- * @usecase def min: A
- * @return the smallest element of this $coll
+ * @tparam B the type of the elements in the resulting collection
+ * @tparam That the actual type of the resulting collection
+ * @param z the initial value
+ * @param op the binary operator applied to the intermediate result and the element
+ * @param bf $bfinfo
+ * @return collection with intermediate results
*/
- def min[B >: A](implicit cmp: Ordering[B]): A = {
- if (isEmpty) throw new UnsupportedOperationException("empty.min")
- var acc = self.head
- for (x <- self)
- if (cmp.lt(x, acc)) acc = x
- acc
+ def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ val b = bf(repr)
+ var acc = z
+ b += acc
+ for (x <- this) { acc = op(acc, x); b += acc }
+ b.result
}
- /** Finds the largest element.
- *
- * @param cmp An ordering to be used for comparing elements.
- * @tparam B The type over which the ordering is defined.
- * @return the largest element of this $coll with respect to the ordering `cmp`.
+ /**
+ * Produces a collection containing cumulative results of applying the operator going right to left.
+ * $willNotTerminateInf
+ * $orderDependent
*
- * @usecase def min: A
- * @return the largest element of this $coll.
+ * @tparam B the type of the elements in the resulting collection
+ * @tparam That the actual type of the resulting collection
+ * @param z the initial value
+ * @param op the binary operator applied to the intermediate result and the element
+ * @param bf $bfinfo
+ * @return collection with intermediate results
*/
- def max[B >: A](implicit cmp: Ordering[B]): A = {
- if (isEmpty) throw new UnsupportedOperationException("empty.max")
- var acc = self.head
- for (x <- self)
- if (cmp.gt(x, acc)) acc = x
- acc
+ def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
+ val b = bf(repr)
+ var acc = z
+ b += acc
+ for (x <- reversed) { acc = op(x, acc); b += acc }
+ b.result
}
/** Selects the first element of this $coll.
@@ -849,7 +617,7 @@ self =>
b.result
}
- /** Spits this $coll into a prefix/suffix pair according to a predicate.
+ /** Splits this $coll into a prefix/suffix pair according to a predicate.
*
* Note: `c span p` is equivalent to (but possibly more efficient than)
* `(c takeWhile p, c dropWhile p)`, provided the evaluation of the predicate `p`
@@ -889,14 +657,6 @@ self =>
(l.result, r.result)
}
- /** Copies all elements of this $coll to a buffer.
- * $willNotTerminateInf
- * @param dest The buffer to which elements are copied.
- */
- def copyToBuffer[B >: A](dest: Buffer[B]) {
- for (x <- this) dest += x
- }
-
/** Copies elements of this $coll to an array.
* Fills the given array `xs` with at most `len` elements of
* this $coll, starting at position `start`.
@@ -925,188 +685,11 @@ self =>
}
}
- /** Copies elements of this $coll to an array.
- * Fills the given array `xs` with all elements of
- * this $coll, starting at position `start`.
- * Copying will stop once either the end of the current $coll is reached,
- * or the end of the array is reached.
- *
- * $willNotTerminateInf
- *
- * @param xs the array to fill.
- * @param start the starting index.
- * @tparam B the type of the elements of the array.
- *
- * @usecase def copyToArray(xs: Array[A], start: Int): Unit
- */
- def copyToArray[B >: A](xs: Array[B], start: Int) {
- copyToArray(xs, start, xs.length - start)
- }
-
- /** Copies elements of this $coll to an array.
- * Fills the given array `xs` with all elements of
- * this $coll, starting at position `0`.
- * Copying will stop once either the end of the current $coll is reached,
- * or the end of the array is reached.
- *
- * $willNotTerminateInf
- *
- * @param xs the array to fill.
- * @tparam B the type of the elements of the array.
- *
- * @usecase def copyToArray(xs: Array[A], start: Int): Unit
- */
- def copyToArray[B >: A](xs: Array[B]) {
- copyToArray(xs, 0)
- }
-
- /** Converts this $coll to an array.
- * $willNotTerminateInf
- *
- * @tparam B the type of the elements of the array. A `ClassManifest` for this type must
- * be available.
- * @return an array containing all elements of this $coll.
- *
- * @usecase def toArray: Array[A]
- * @return an array containing all elements of this $coll.
- * A `ClassManifest` must be available for the element type of this $coll.
- */
- def toArray[B >: A : ClassManifest]: Array[B] = {
- val result = new Array[B](size)
- copyToArray(result, 0)
- result
- }
-
- /** Converts this $coll to a list.
- * $willNotTerminateInf
- * @return a list containing all elements of this $coll.
- */
- def toList: List[A] = (new ListBuffer[A] ++= thisCollection).toList
-
- /** Converts this $coll to an iterable collection.
- * $willNotTerminateInf
- * @return an `Iterable` containing all elements of this $coll.
- */
- def toIterable: Iterable[A] = toStream
-
- /** Converts this $coll to a sequence.
- * $willNotTerminateInf
- * @return a sequence containing all elements of this $coll.
- */
- def toSeq: Seq[A] = toList
-
- /** Converts this $coll to an indexed sequence.
- * $willNotTerminateInf
- * @return an indexed sequence containing all elements of this $coll.
- */
- def toIndexedSeq[B >: A]: mutable.IndexedSeq[B] = (new ArrayBuffer[B] ++= thisCollection)
-
- /** Converts this $coll to a stream.
- * $willNotTerminateInf
- * @return a stream containing all elements of this $coll.
- */
- def toStream: Stream[A] = toList.toStream
-
- /** Converts this $coll to a set.
- * $willNotTerminateInf
- * @return a set containing all elements of this $coll.
- */
- def toSet[B >: A]: immutable.Set[B] = immutable.Set() ++ thisCollection
-
- /** Converts this $coll to a map. This method is unavailable unless
- * the elements are members of Tuple2, each ((K, V)) becoming a key-value
- * pair in the map. Duplicate keys will be overwritten by later keys:
- * if this is an unordered collection, which key is in the resulting map
- * is undefined.
- * $willNotTerminateInf
- * @return a map containing all elements of this $coll.
- */
- def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = {
- val b = immutable.Map.newBuilder[T, U]
- for (x <- this)
- b += x
-
- b.result
- }
-
- /** Displays all elements of this $coll in a string using start, end, and separator strings.
- *
- * @param start the starting string.
- * @param sep the separator string.
- * @param end the ending string.
- * @return a string representation of this $coll. The resulting string
- * begins with the string `start` and ends with the string
- * `end`. Inside, the string representations (w.r.t. the method `toString`)
- * of all elements of this $coll are separated by the string `sep`.
- *
- * @ex `List(1, 2, 3).mkString("(", "; ", ")") = "(1; 2; 3)"`
- */
- def mkString(start: String, sep: String, end: String): String =
- addString(new StringBuilder(), start, sep, end).toString
-
- /** Displays all elements of this $coll in a string using a separator string.
- *
- * @param sep the separator string.
- * @return a string representation of this $coll. In the resulting string
- * the string representations (w.r.t. the method `toString`)
- * of all elements of this $coll are separated by the string `sep`.
- *
- * @ex `List(1, 2, 3).mkString("|") = "1|2|3"`
- */
- def mkString(sep: String): String =
- addString(new StringBuilder(), sep).toString
-
- /** Displays all elements of this $coll in a string.
- * @return a string representation of this $coll. In the resulting string
- * the string representations (w.r.t. the method `toString`)
- * of all elements of this $coll follow each other without any separator string.
- */
- def mkString: String =
- addString(new StringBuilder()).toString
-
- /** Appends all elements of this $coll to a string builder using start, end, and separator strings.
- * The written text begins with the string `start` and ends with the string
- * `end`. Inside, the string representations (w.r.t. the method `toString`)
- * of all elements of this $coll are separated by the string `sep`.
- *
- * @param b the string builder to which elements are appended.
- * @param start the starting string.
- * @param sep the separator string.
- * @param end the ending string.
- * @return the string builder `b` to which elements were appended.
- */
- def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = {
- b append start
- var first = true
- for (x <- this) {
- if (first) first = false
- else b append sep
- b append x
- }
- b append end
- }
-
- /** Appends all elements of this $coll to a string builder using a separator string.
- * The written text consists of the string representations (w.r.t. the method `toString`)
- * of all elements of this $coll, separated by the string `sep`.
- *
- * @param b the string builder to which elements are appended.
- * @param sep the separator string.
- * @return the string builder `b` to which elements were appended.
- */
- def addString(b: StringBuilder, sep: String): StringBuilder = addString(b, "", sep, "")
-
- /** Appends all elements of this $coll to a string builder.
- * The written text consists of the string representations (w.r.t. the method `toString`)
- * of all elements of this $coll without any separator string.
- *
- * @param b the string builder to which elements are appended.
- * @return the string builder `b` to which elements were appended.
- */
- def addString(b: StringBuilder): StringBuilder = addString(b, "")
+ def toTraversable: Traversable[A] = thisCollection
+ def toIterator: Iterator[A] = toIterable.iterator
/** Converts this $coll to a string
- * @returns a string representation of this collection. By default this
+ * @return a string representation of this collection. By default this
* string consists of the `stringPrefix` of this $coll,
* followed by all elements separated by commas and enclosed in parentheses.
*/
@@ -1131,7 +714,7 @@ self =>
*/
def view = new TraversableView[A, Repr] {
protected lazy val underlying = self.repr
- override def foreach[B](f: A => B) = self foreach f
+ override def foreach[U](f: A => U) = self foreach f
}
/** Creates a non-strict view of a slice of this $coll.
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
new file mode 100644
index 0000000000..6e4917b77e
--- /dev/null
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -0,0 +1,522 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+import mutable.{ Buffer, ListBuffer, ArrayBuffer }
+
+/** A template trait for collections which can be traversed one
+ * or more times.
+ * $traversableonceinfo
+ *
+ * @tparam A the element type of the collection
+ *
+ * @define traversableonceinfo
+ * This trait is composed of those methods which can be implemented
+ * solely in terms of foreach and which do not need access to a Builder.
+ * It represents the implementations common to Iterators and
+ * Traversables, such as folds, conversions, and other operations which
+ * traverse some or all of the elements and return a derived value.
+ *
+ * @author Martin Odersky
+ * @author Paul Phillips
+ * @version 2.8
+ * @since 2.8
+ *
+ * @define coll traversable or iterator
+ * @define orderDependentFold
+ *
+ * Note: might return different results for different runs, unless the underlying collection type is ordered,
+ * or the operator is associative and commutative.
+ * @define willNotTerminateInf
+ *
+ * Note: will not terminate for infinite-sized collections.
+ */
+trait TraversableOnce[+A] {
+ self =>
+
+ /** Self-documenting abstract methods. */
+ def foreach[U](f: A => U): Unit
+ def isEmpty: Boolean
+ def hasDefiniteSize: Boolean
+
+ /** Tests whether this $coll can be repeatedly traversed. Always
+ * true for Traversables and false for Iterators unless overridden.
+ *
+ * @return `true` if it is repeatedly traversable, `false` otherwise.
+ */
+ def isTraversableAgain: Boolean
+
+ /** Returns an Iterator over the elements in this $coll. Will return
+ * the same Iterator if this instance is already an Iterator.
+ * $willNotTerminateInf
+ * @return an Iterator containing all elements of this $coll.
+ */
+ def toIterator: Iterator[A]
+
+ /** Converts this $coll to an unspecified Traversable. Will return
+ * the same collection if this instance is already Traversable.
+ * $willNotTerminateInf
+ * @return a Traversable containing all elements of this $coll.
+ */
+ def toTraversable: Traversable[A]
+
+ /** Presently these are abstract because the Traversable versions use
+ * breakable/break, and I wasn't sure enough of how that's supposed to
+ * function to consolidate them with the Iterator versions.
+ */
+ def forall(p: A => Boolean): Boolean
+ def exists(p: A => Boolean): Boolean
+ def find(p: A => Boolean): Option[A]
+ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit
+ // def mapFind[B](f: A => Option[B]): Option[B]
+
+ // for internal use
+ protected[this] def reversed = {
+ var elems: List[A] = Nil
+ self foreach (elems ::= _)
+ elems
+ }
+
+ /** The size of this $coll.
+ *
+ * $willNotTerminateInf
+ *
+ * @return the number of elements in this $coll.
+ */
+ def size: Int = {
+ var result = 0
+ for (x <- self) result += 1
+ result
+ }
+
+ /** Tests whether the $coll is not empty.
+ *
+ * @return `true` if the $coll contains at least one element, `false` otherwise.
+ */
+ def nonEmpty: Boolean = !isEmpty
+
+ /** Counts the number of elements in the $coll which satisfy a predicate.
+ *
+ * @param p the predicate used to test elements.
+ * @return the number of elements satisfying the predicate `p`.
+ */
+ def count(p: A => Boolean): Int = {
+ var cnt = 0
+ for (x <- this)
+ if (p(x)) cnt += 1
+
+ cnt
+ }
+
+ /** Applies a binary operator to a start value and all elements of this $coll, going left to right.
+ *
+ * Note: `/:` is alternate syntax for `foldLeft`; `z /: xs` is the same as `xs foldLeft z`.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param z the start value.
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll$,
+ * going left to right with the start value `z` on the left:
+ * {{{
+ * op(...op(op(z, x,,1,,), x,,2,,), ..., x,,n,,)
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ */
+ def /:[B](z: B)(op: (B, A) => B): B = foldLeft(z)(op)
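
For example (a small sketch, not part of this file, assuming a plain `List` as the receiver), the two spellings compute the same left fold:

  val xs = List(1, 2, 3, 4)
  (0 /: xs)(_ + _)        // 10, start value on the left
  xs.foldLeft(0)(_ + _)   // 10, identical result
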
+
+ /** Applies a binary operator to all elements of this $coll and a start value, going right to left.
+ *
+ * Note: `:\` is alternate syntax for `foldRight`; `xs :\ z` is the same as `xs foldRight z`.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param z the start value
+ * @param op the binary operator
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll$,
+ * going right to left with the start value `z` on the right:
+ * {{{
+ * op(x,,1,,, op(x,,2,,, ... op(x,,n,,, z)...))
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ */
+ def :\[B](z: B)(op: (A, B) => B): B = foldRight(z)(op)
+
+ /** Applies a binary operator to a start value and all elements of this $coll, going left to right.
+ *
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param z the start value.
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll$,
+ * going left to right with the start value `z` on the left:
+ * {{{
+ * op(...op(z, x,,1,,), x,,2,,, ..., x,,n,,)
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ */
+ def foldLeft[B](z: B)(op: (B, A) => B): B = {
+ var result = z
+ this foreach (x => result = op(result, x))
+ result
+ }
+
+ /** Applies a binary operator to all elements of this $coll and a start value, going right to left.
+ *
+ * $willNotTerminateInf
+ * $orderDependentFold
+ * @param z the start value.
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll$,
+ * going right to left with the start value `z` on the right:
+ * {{{
+ * op(x,,1,,, op(x,,2,,, ... op(x,,n,,, z)...))
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ */
+ def foldRight[B](z: B)(op: (A, B) => B): B =
+ reversed.foldLeft(z)((x, y) => op(y, x))
+
+ /** Applies a binary operator to all elements of this $coll, going left to right.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll$,
+ * going left to right:
+ * {{{
+ * op(...(op(x,,1,,, x,,2,,), ... ) , x,,n,,)
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ * @throws `UnsupportedOperationException` if this $coll is empty.
+ */
+ def reduceLeft[B >: A](op: (B, A) => B): B = {
+ if (isEmpty)
+ throw new UnsupportedOperationException("empty.reduceLeft")
+
+ var first = true
+ var acc: B = 0.asInstanceOf[B]
+
+ for (x <- self) {
+ if (first) {
+ acc = x
+ first = false
+ }
+ else acc = op(acc, x)
+ }
+ acc
+ }
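
A brief illustration of the order dependence and the empty-collection behaviour described above (a sketch, assuming a `List` receiver):

  List(1, 5, 3).reduceLeft(_ max _)    // 5
  List(1, 5, 3).reduceLeft(_ - _)      // (1 - 5) - 3 == -7, order dependent
  // List[Int]().reduceLeft(_ + _)     // would throw UnsupportedOperationException
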
+
+ /** Applies a binary operator to all elements of this $coll, going right to left.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return the result of inserting `op` between consecutive elements of this $coll$,
+ * going right to left:
+ * {{{
+ * op(x,,1,,, op(x,,2,,, ..., op(x,,n-1,,, x,,n,,)...))
+ * }}}
+ * where `x,,1,,, ..., x,,n,,` are the elements of this $coll.
+ * @throws `UnsupportedOperationException` if this $coll is empty.
+ */
+ def reduceRight[B >: A](op: (A, B) => B): B = {
+ if (isEmpty)
+ throw new UnsupportedOperationException("empty.reduceRight")
+
+ reversed.reduceLeft[B]((x, y) => op(y, x))
+ }
+
+ /** Optionally applies a binary operator to all elements of this $coll, going left to right.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return an option value containing the result of `reduceLeft(op)` if this $coll is nonempty,
+ * `None` otherwise.
+ */
+ def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] =
+ if (isEmpty) None else Some(reduceLeft(op))
+
+ /** Optionally applies a binary operator to all elements of this $coll, going right to left.
+ * $willNotTerminateInf
+ * $orderDependentFold
+ *
+ * @param op the binary operator.
+ * @tparam B the result type of the binary operator.
+ * @return an option value containing the result of `reduceRight(op)` if this $coll is nonempty,
+ * `None` otherwise.
+ */
+ def reduceRightOption[B >: A](op: (A, B) => B): Option[B] =
+ if (isEmpty) None else Some(reduceRight(op))
+
+ /** Sums up the elements of this collection.
+ *
+ * @param num an implicit parameter defining a set of numeric operations
+ * which includes the `+` operator to be used in forming the sum.
+ * @tparam B the result type of the `+` operator.
+ * @return the sum of all elements of this $coll with respect to the `+` operator in `num`.
+ *
+ * @usecase def sum: Int
+ *
+ * @return the sum of all elements in this $coll of numbers of type `Int`.
+ * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
+ * can be used as element type of the $coll and as result type of `sum`.
+ * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
+ *
+ */
+ def sum[B >: A](implicit num: Numeric[B]): B = foldLeft(num.zero)(num.plus)
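
As a sketch of the use case above, any element type with an implicit `Numeric` instance works:

  List(1, 2, 3).sum                    // 6, via Numeric[Int]
  List(1.5, 2.5).sum                   // 4.0, via Numeric[Double]
  List(BigInt(1), BigInt(2)).sum       // 3, via Numeric[BigInt]
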
+
+ /** Multiplies together the elements of this collection.
+ *
+ * @param num an implicit parameter defining a set of numeric operations
+ * which includes the `*` operator to be used in forming the product.
+ * @tparam B the result type of the `*` operator.
+ * @return the product of all elements of this $coll with respect to the `*` operator in `num`.
+ *
+ * @usecase def product: Int
+ *
+ * @return the product of all elements in this $coll of numbers of type `Int`.
+ * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
+ * can be used as element type of the $coll and as result type of `product`.
+ * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
+ */
+ def product[B >: A](implicit num: Numeric[B]): B = foldLeft(num.one)(num.times)
+
+ /** Finds the smallest element.
+ *
+ * @param cmp An ordering to be used for comparing elements.
+ * @tparam B The type over which the ordering is defined.
+ * @return the smallest element of this $coll with respect to the ordering `cmp`.
+ *
+ * @usecase def min: A
+ * @return the smallest element of this $coll
+ */
+ def min[B >: A](implicit cmp: Ordering[B]): A = {
+ if (isEmpty)
+ throw new UnsupportedOperationException("empty.min")
+
+ reduceLeft((x, y) => if (cmp.lteq(x, y)) x else y)
+ }
+
+ /** Finds the largest element.
+ *
+ * @param cmp An ordering to be used for comparing elements.
+ * @tparam B The type over which the ordering is defined.
+ * @return the largest element of this $coll with respect to the ordering `cmp`.
+ *
+ * @usecase def max: A
+ * @return the largest element of this $coll.
+ */
+ def max[B >: A](implicit cmp: Ordering[B]): A = {
+ if (isEmpty)
+ throw new UnsupportedOperationException("empty.max")
+
+ reduceLeft((x, y) => if (cmp.gteq(x, y)) x else y)
+ }
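
A short sketch of `min`/`max` with implicit and explicit orderings (nothing here is taken from this file):

  List(3, 1, 4).min                          // 1, implicit Ordering[Int]
  List("b", "a", "c").max                    // "c", implicit Ordering[String]
  List(3, 1, 4).min(Ordering[Int].reverse)   // 4, an explicit ordering flips the result
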
+
+ /** Copies all elements of this $coll to a buffer.
+ * $willNotTerminateInf
+ * @param dest The buffer to which elements are copied.
+ */
+ def copyToBuffer[B >: A](dest: Buffer[B]): Unit = dest ++= self
+
+ /** Copies values produced by this iterator to an array.
+ * Fills the given array `xs` with values produced by this iterator, beginning at index `start` of the array.
+ * Copying will stop once either the end of the current iterator is reached,
+ * or the end of the array is reached.
+ *
+ * $willNotTerminateInf
+ *
+ * @param xs the array to fill.
+ * @param start the starting index.
+ * @tparam B the type of the elements of the array.
+ *
+ * @usecase def copyToArray(xs: Array[A], start: Int): Unit
+ */
+ def copyToArray[B >: A](xs: Array[B], start: Int): Unit =
+ copyToArray(xs, start, xs.length - start)
+
+ /** Copies values produced by this iterator to an array.
+ * Fills the given array `xs` with values produced by this iterator.
+ * Copying will stop once either the end of the current iterator is reached,
+ * or the end of the array is reached.
+ *
+ * $willNotTerminateInf
+ *
+ * @param xs the array to fill.
+ * @tparam B the type of the elements of the array.
+ *
+ * @usecase def copyToArray(xs: Array[A]): Unit
+ */
+ def copyToArray[B >: A](xs: Array[B]): Unit =
+ copyToArray(xs, 0, xs.length)
+
+ /** Converts this $coll to an array.
+ * $willNotTerminateInf
+ *
+ * @tparam B the type of the elements of the array. A `ClassManifest` for this type must
+ * be available.
+ * @return an array containing all elements of this $coll.
+ *
+ * @usecase def toArray: Array[A]
+ * @return an array containing all elements of this $coll.
+ * A `ClassManifest` must be available for the element type of this $coll.
+ */
+ def toArray[B >: A : ClassManifest]: Array[B] = {
+ if (isTraversableAgain) {
+ val result = new Array[B](size)
+ copyToArray(result, 0)
+ result
+ }
+ else toStream.toArray
+ }
+
+ /** Converts this $coll to a list.
+ * $willNotTerminateInf
+ * @return a list containing all elements of this $coll.
+ */
+ def toList: List[A] = new ListBuffer[A] ++= self toList
+
+ /** Converts this $coll to an iterable collection.
+ * $willNotTerminateInf
+ * @return an `Iterable` containing all elements of this $coll.
+ */
+ def toIterable: Iterable[A] = toStream
+
+ /** Converts this $coll to a sequence.
+ * $willNotTerminateInf
+ * @return a sequence containing all elements of this $coll.
+ */
+ def toSeq: Seq[A] = toList
+
+ /** Converts this $coll to an indexed sequence.
+ * $willNotTerminateInf
+ * @return an indexed sequence containing all elements of this $coll.
+ */
+ def toIndexedSeq[B >: A]: mutable.IndexedSeq[B] = new ArrayBuffer[B] ++= self
+
+ /** Converts this $coll to a stream.
+ * $willNotTerminateInf
+ * @return a stream containing all elements of this $coll.
+ */
+ def toStream: Stream[A] = toList.toStream
+
+ /** Converts this $coll to a set.
+ * $willNotTerminateInf
+ * @return a set containing all elements of this $coll.
+ */
+ def toSet[B >: A]: immutable.Set[B] = immutable.Set() ++ self
+
+ /** Converts this $coll to a map. This method is unavailable unless
+ * the elements are members of Tuple2, each `(T, U)` becoming a key-value
+ * pair in the map. Duplicate keys will be overwritten by later keys: if
+ * this is an unordered collection, which of the duplicate keys ends up
+ * in the resulting map is undefined.
+ * $willNotTerminateInf
+ * @return a map containing all elements of this $coll.
+ */
+ def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = {
+ val b = immutable.Map.newBuilder[T, U]
+ for (x <- self)
+ b += x
+
+ b.result
+ }
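
A minimal sketch of the duplicate-key behaviour described above, using a list of pairs:

  List("a" -> 1, "b" -> 2).toMap      // Map(a -> 1, b -> 2)
  List("a" -> 1, "a" -> 2).toMap      // Map(a -> 2), the later key wins
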
+
+ /** Displays all elements of this $coll in a string using start, end, and separator strings.
+ *
+ * @param start the starting string.
+ * @param sep the separator string.
+ * @param end the ending string.
+ * @return a string representation of this $coll. The resulting string
+ * begins with the string `start` and ends with the string
+ * `end`. Inside, the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll are separated by the string `sep`.
+ *
+ * @example `List(1, 2, 3).mkString("(", "; ", ")") = "(1; 2; 3)"`
+ */
+ def mkString(start: String, sep: String, end: String): String =
+ addString(new StringBuilder(), start, sep, end).toString
+
+ /** Displays all elements of this $coll in a string using a separator string.
+ *
+ * @param sep the separator string.
+ * @return a string representation of this $coll. In the resulting string
+ * the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll are separated by the string `sep`.
+ *
+ * @example `List(1, 2, 3).mkString("|") = "1|2|3"`
+ */
+ def mkString(sep: String): String = mkString("", sep, "")
+
+ /** Displays all elements of this $coll in a string.
+ * @return a string representation of this $coll. In the resulting string
+ * the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll follow each other without any separator string.
+ */
+ def mkString: String = mkString("")
+
+ /** Appends all elements of this $coll to a string builder using start, end, and separator strings.
+ * The written text begins with the string `start` and ends with the string
+ * `end`. Inside, the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll are separated by the string `sep`.
+ *
+ * @param b the string builder to which elements are appended.
+ * @param start the starting string.
+ * @param sep the separator string.
+ * @param end the ending string.
+ * @return the string builder `b` to which elements were appended.
+ */
+ def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = {
+ var first = true
+
+ b append start
+ for (x <- self) {
+ if (first) {
+ b append x
+ first = false
+ }
+ else {
+ b append sep
+ b append x
+ }
+ }
+ b append end
+
+ b
+ }
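
For instance (a sketch assuming a `List` receiver and a pre-filled builder):

  val b = new StringBuilder("result: ")
  List(1, 2, 3).addString(b, "[", ", ", "]")
  b.toString                          // "result: [1, 2, 3]"
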
+
+ /** Appends all elements of this $coll to a string builder using a separator string.
+ * The written text consists of the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll, separated by the string `sep`.
+ *
+ * @param b the string builder to which elements are appended.
+ * @param sep the separator string.
+ * @return the string builder `b` to which elements were appended.
+ */
+ def addString(b: StringBuilder, sep: String): StringBuilder = addString(b, "", sep, "")
+
+ /** Appends all elements of this $coll to a string builder.
+ * The written text consists of the string representations (w.r.t. the method `toString`)
+ * of all elements of this $coll without any separator string.
+ *
+ * @param b the string builder to which elements are appended.
+ * @return the string builder `b` to which elements were appended.
+ */
+ def addString(b: StringBuilder): StringBuilder = addString(b, "")
+}
diff --git a/src/library/scala/collection/TraversableProxy.scala b/src/library/scala/collection/TraversableProxy.scala
index 4a14937781..dd450dccac 100644
--- a/src/library/scala/collection/TraversableProxy.scala
+++ b/src/library/scala/collection/TraversableProxy.scala
@@ -11,7 +11,7 @@
package scala.collection
-// Methods could be printed by cat TraversibeLike.scala | egrep '^ (override )?def'
+// Methods could be printed by cat TraversableLike.scala | egrep '^ (override )?def'
/** This trait implements a proxy for traversable objects. It forwards
diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala
index 24d6c7048d..fb8da98a6b 100644
--- a/src/library/scala/collection/TraversableProxyLike.scala
+++ b/src/library/scala/collection/TraversableProxyLike.scala
@@ -24,23 +24,22 @@ import mutable.{Buffer, StringBuilder}
* @version 2.8
* @since 2.8
*/
-trait TraversableProxyLike[+A, +This <: TraversableLike[A, This] with Traversable[A]] extends TraversableLike[A, This] with Proxy {
- def self: This
+trait TraversableProxyLike[+A, +Repr <: TraversableLike[A, Repr] with Traversable[A]] extends TraversableLike[A, Repr] with Proxy {
+ def self: Repr
override def foreach[B](f: A => B): Unit = self.foreach(f)
override def isEmpty: Boolean = self.isEmpty
override def nonEmpty: Boolean = self.nonEmpty
override def size: Int = self.size
override def hasDefiniteSize = self.hasDefiniteSize
- override def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That = self.++(that)(bf)
- override def ++[B >: A, That](that: Iterator[B])(implicit bf: CanBuildFrom[This, B, That]): That = self.++(that)(bf)
- override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = self.map(f)(bf)
- override def flatMap[B, That](f: A => Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That = self.flatMap(f)(bf)
- override def partialMap[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[This, B, That]): That = self.partialMap(pf)(bf)
- override def filter(p: A => Boolean): This = self.filter(p)
- override def filterNot(p: A => Boolean): This = self.filterNot(p)
- override def partition(p: A => Boolean): (This, This) = self.partition(p)
- override def groupBy[K](f: A => K): Map[K, This] = self.groupBy(f)
+ override def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.++(xs)(bf)
+ override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.map(f)(bf)
+ override def flatMap[B, That](f: A => Traversable[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.flatMap(f)(bf)
+ override def filter(p: A => Boolean): Repr = self.filter(p)
+ override def filterNot(p: A => Boolean): Repr = self.filterNot(p)
+ override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.collect(pf)(bf)
+ override def partition(p: A => Boolean): (Repr, Repr) = self.partition(p)
+ override def groupBy[K](f: A => K): Map[K, Repr] = self.groupBy(f)
override def forall(p: A => Boolean): Boolean = self.forall(p)
override def exists(p: A => Boolean): Boolean = self.exists(p)
override def count(p: A => Boolean): Int = self.count(p)
@@ -53,28 +52,37 @@ trait TraversableProxyLike[+A, +This <: TraversableLike[A, This] with Traversabl
override def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = self.reduceLeftOption(op)
override def reduceRight[B >: A](op: (A, B) => B): B = self.reduceRight(op)
override def reduceRightOption[B >: A](op: (A, B) => B): Option[B] = self.reduceRightOption(op)
+ override def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.scanLeft(z)(op)(bf)
+ override def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = self.scanRight(z)(op)(bf)
+ override def sum[B >: A](implicit num: Numeric[B]): B = self.sum(num)
+ override def product[B >: A](implicit num: Numeric[B]): B = self.product(num)
+ override def min[B >: A](implicit cmp: Ordering[B]): A = self.min(cmp)
+ override def max[B >: A](implicit cmp: Ordering[B]): A = self.max(cmp)
override def head: A = self.head
override def headOption: Option[A] = self.headOption
- override def tail: This = self.tail
+ override def tail: Repr = self.tail
override def last: A = self.last
override def lastOption: Option[A] = self.lastOption
- override def init: This = self.init
- override def take(n: Int): This = self.take(n)
- override def drop(n: Int): This = self.drop(n)
- override def slice(from: Int, until: Int): This = self.slice(from, until)
- override def takeWhile(p: A => Boolean): This = self.takeWhile(p)
- override def dropWhile(p: A => Boolean): This = self.dropWhile(p)
- override def span(p: A => Boolean): (This, This) = self.span(p)
- override def splitAt(n: Int): (This, This) = self.splitAt(n)
+ override def init: Repr = self.init
+ override def take(n: Int): Repr = self.take(n)
+ override def drop(n: Int): Repr = self.drop(n)
+ override def slice(from: Int, until: Int): Repr = self.slice(from, until)
+ override def takeWhile(p: A => Boolean): Repr = self.takeWhile(p)
+ override def dropWhile(p: A => Boolean): Repr = self.dropWhile(p)
+ override def span(p: A => Boolean): (Repr, Repr) = self.span(p)
+ override def splitAt(n: Int): (Repr, Repr) = self.splitAt(n)
override def copyToBuffer[B >: A](dest: Buffer[B]) = self.copyToBuffer(dest)
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) = self.copyToArray(xs, start, len)
override def copyToArray[B >: A](xs: Array[B], start: Int) = self.copyToArray(xs, start)
+ override def copyToArray[B >: A](xs: Array[B]) = self.copyToArray(xs)
override def toArray[B >: A: ClassManifest]: Array[B] = self.toArray
override def toList: List[A] = self.toList
override def toIterable: Iterable[A] = self.toIterable
override def toSeq: Seq[A] = self.toSeq
+ override def toIndexedSeq[B >: A]: mutable.IndexedSeq[B] = self.toIndexedSeq
override def toStream: Stream[A] = self.toStream
override def toSet[B >: A]: immutable.Set[B] = self.toSet
+ override def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = self.toMap(ev)
override def mkString(start: String, sep: String, end: String): String = self.mkString(start, sep, end)
override def mkString(sep: String): String = self.mkString(sep)
override def mkString: String = self.mkString
@@ -83,14 +91,18 @@ trait TraversableProxyLike[+A, +This <: TraversableLike[A, This] with Traversabl
override def addString(b: StringBuilder): StringBuilder = self.addString(b)
override def stringPrefix : String = self.stringPrefix
override def view = self.view
- override def view(from: Int, until: Int): TraversableView[A, This] = self.view(from, until)
+ override def view(from: Int, until: Int): TraversableView[A, Repr] = self.view(from, until)
}
-private class TraversableProxyLikeConfirmation[+A, +This <: TraversableLike[A, This] with Traversable[A]]
+/** Martin to Paul: I'm not sure what the purpose of this class is. I assume it was to make
+ * sure that TraversableProxyLike has all Traversable methods, but it fails at that.
+ *
+private class TraversableProxyLikeConfirmation[+A, +Repr <: TraversableLike[A, Repr] with Traversable[A]]
extends TraversableProxyLike[A, Traversable[A]]
with interfaces.TraversableMethods[A, Traversable[A]]
{
- def self: This = repr.asInstanceOf[This]
+ def self: Repr = repr.asInstanceOf[Repr]
protected[this] def newBuilder = scala.collection.Traversable.newBuilder[A]
- // : Builder[A, This]
+ // : Builder[A, Repr]
}
+*/
diff --git a/src/library/scala/collection/TraversableView.scala b/src/library/scala/collection/TraversableView.scala
index 8a67b8d10f..e9332097e7 100644
--- a/src/library/scala/collection/TraversableView.scala
+++ b/src/library/scala/collection/TraversableView.scala
@@ -18,7 +18,7 @@ import TraversableView.NoBuilder
/** <p>
* A base class for views of <a href="../Traversable.html"
* target="ContentFrame"><code>Traversable<code></a>.<br/>
- * Every subclass has to implenment the <code>foreach</code> method.
+ * Every subclass has to implement the <code>foreach</code> method.
* </p>
*
* @author Martin Odersky
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index 84c33296db..09e6a65158 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -12,7 +12,7 @@
package scala.collection
import generic._
-import mutable.Builder
+import mutable.{Builder, ArrayBuffer}
import TraversableView.NoBuilder
/** <p>
@@ -47,8 +47,23 @@ self =>
b.result()
}
+ /** The implementation base trait of this view.
+ * This trait and all its subtraits have to be re-implemented for each
+ * ViewLike class.
+ */
trait Transformed[+B] extends TraversableView[B, Coll] {
lazy val underlying = self.underlying
+ override def toString = stringPrefix+"(...)"
+ }
+
+ /** A fallback which forces everything into a sequence and then applies an operation
+ * on it. Used for those operations which do not naturally lend themselves to a view.
+ */
+ trait Forced[B] extends Transformed[B] {
+ protected[this] def forced: Seq[B]
+ private[this] lazy val forcedCache = forced
+ override def foreach[U](f: B => U) = forcedCache.foreach(f)
+ override def stringPrefix = self.stringPrefix+"C"
}
/** pre: from >= 0
@@ -56,7 +71,7 @@ self =>
trait Sliced extends Transformed[A] {
protected[this] val from: Int
protected[this] val until: Int
- override def foreach[C](f: A => C) {
+ override def foreach[U](f: A => U) {
var index = 0
for (x <- self) {
if (from <= index) {
@@ -73,7 +88,7 @@ self =>
trait Mapped[B] extends Transformed[B] {
protected[this] val mapping: A => B
- override def foreach[C](f: B => C) {
+ override def foreach[U](f: B => U) {
for (x <- self)
f(mapping(x))
}
@@ -82,7 +97,7 @@ self =>
trait FlatMapped[B] extends Transformed[B] {
protected[this] val mapping: A => Traversable[B]
- override def foreach[C](f: B => C) {
+ override def foreach[U](f: B => U) {
for (x <- self)
for (y <- mapping(x))
f(y)
@@ -92,7 +107,7 @@ self =>
trait Appended[B >: A] extends Transformed[B] {
protected[this] val rest: Traversable[B]
- override def foreach[C](f: B => C) {
+ override def foreach[U](f: B => U) {
for (x <- self) f(x)
for (x <- rest) f(x)
}
@@ -101,7 +116,7 @@ self =>
trait Filtered extends Transformed[A] {
protected[this] val pred: A => Boolean
- override def foreach[C](f: A => C) {
+ override def foreach[U](f: A => U) {
for (x <- self)
if (pred(x)) f(x)
}
@@ -110,7 +125,7 @@ self =>
trait TakenWhile extends Transformed[A] {
protected[this] val pred: A => Boolean
- override def foreach[C](f: A => C) {
+ override def foreach[U](f: A => U) {
for (x <- self) {
if (!pred(x)) return
f(x)
@@ -121,7 +136,7 @@ self =>
trait DroppedWhile extends Transformed[A] {
protected[this] val pred: A => Boolean
- override def foreach[C](f: A => C) {
+ override def foreach[U](f: A => U) {
var go = false
for (x <- self) {
if (!go && !pred(x)) go = true
@@ -134,6 +149,7 @@ self =>
/** Boilerplate method, to override in each subclass
* This method could be eliminated if Scala had virtual classes
*/
+ protected def newForced[B](xs: => Seq[B]): Transformed[B] = new Forced[B] { val forced = xs }
protected def newAppended[B >: A](that: Traversable[B]): Transformed[B] = new Appended[B] { val rest = that }
protected def newMapped[B](f: A => B): Transformed[B] = new Mapped[B] { val mapping = f }
protected def newFlatMapped[B](f: A => Traversable[B]): Transformed[B] = new FlatMapped[B] { val mapping = f }
@@ -142,14 +158,12 @@ self =>
protected def newDroppedWhile(p: A => Boolean): Transformed[A] = new DroppedWhile { val pred = p }
protected def newTakenWhile(p: A => Boolean): Transformed[A] = new TakenWhile { val pred = p }
- override def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
- newAppended(that).asInstanceOf[That]
+ override def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
+ newAppended(xs.toTraversable).asInstanceOf[That]
// was: if (bf.isInstanceOf[ByPassCanBuildFrom]) newAppended(that).asInstanceOf[That]
// else super.++[B, That](that)(bf)
}
- override def ++[B >: A, That](that: Iterator[B])(implicit bf: CanBuildFrom[This, B, That]): That = ++[B, That](that.toStream)
-
override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = {
newMapped(f).asInstanceOf[That]
// val b = bf(repr)
@@ -157,6 +171,9 @@ self =>
// else super.map[B, That](f)(bf)
}
+ override def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[This, B, That]): That =
+ filter(pf.isDefinedAt).map(pf)(bf)
+
override def flatMap[B, That](f: A => Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That = {
newFlatMapped(f).asInstanceOf[That]
// was: val b = bf(repr)
@@ -164,7 +181,14 @@ self =>
// else super.flatMap[B, That](f)(bf)
}
+ protected[this] def thisSeq: Seq[A] = {
+ val buf = new ArrayBuffer[A]
+ self foreach (buf +=)
+ buf.result
+ }
+
override def filter(p: A => Boolean): This = newFiltered(p).asInstanceOf[This]
+ override def partition(p: A => Boolean): (This, This) = (filter(p), filter(!p(_)))
override def init: This = newSliced(0, size - 1).asInstanceOf[This]
override def drop(n: Int): This = newSliced(n max 0, Int.MaxValue).asInstanceOf[This]
override def take(n: Int): This = newSliced(0, n).asInstanceOf[This]
@@ -173,5 +197,17 @@ self =>
override def takeWhile(p: A => Boolean): This = newTakenWhile(p).asInstanceOf[This]
override def span(p: A => Boolean): (This, This) = (takeWhile(p), dropWhile(p))
override def splitAt(n: Int): (This, This) = (take(n), drop(n))
+
+ override def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[This, B, That]): That =
+ newForced(thisSeq.scanLeft(z)(op)).asInstanceOf[That]
+
+ override def scanRight[B, That](z: B)(op: (A, B) => B)(implicit bf: CanBuildFrom[This, B, That]): That =
+ newForced(thisSeq.scanRight(z)(op)).asInstanceOf[That]
+
+ override def groupBy[K](f: A => K): Map[K, This] =
+ thisSeq.groupBy(f).mapValues(xs => newForced(xs).asInstanceOf[This])
+
override def stringPrefix = "TraversableView"
}
+
+
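
A sketch of how the new `Forced` node behaves under this patch (the values below assume a plain `List` receiver): `scanLeft` on a view is routed through `newForced`, so the running sums are only computed when the result is traversed.

  val v = List(1, 2, 3).view map (_ * 2)   // nothing evaluated yet
  val sums = v.scanLeft(0)(_ + _)          // wrapped in a Forced node, still lazy
  sums foreach println                     // forces thisSeq and prints 0, 2, 6, 12
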
diff --git a/src/library/scala/collection/generic/Addable.scala b/src/library/scala/collection/generic/Addable.scala
index 9686e96c09..ecbd8301b6 100644
--- a/src/library/scala/collection/generic/Addable.scala
+++ b/src/library/scala/collection/generic/Addable.scala
@@ -52,16 +52,5 @@ trait Addable[A, +Repr <: Addable[A, Repr]] { self =>
* @param elems the collection containing the added elements.
* @return a new $coll with the given elements added.
*/
- def ++ (elems: Traversable[A]): Repr = (repr /: elems) (_ + _)
-
- /** Creates a new $coll by adding all elements produced by an iterator to this $coll.
- *
- * @param iter the iterator producing the added elements.
- * @return a new $coll with the given elements added.
- */
- def ++ (iter: Iterator[A]): Repr = (repr /: iter) (_ + _)
+ def ++ (xs: TraversableOnce[A]): Repr = (repr /: xs) (_ + _)
}
-
-
-
-
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index 0e3c3c203b..683f609686 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -100,8 +100,8 @@ trait GenericTraversableTemplate[+A, +CC[X] <: Traversable[X]] extends HasNewBui
}
/** Transposes this $coll of traversable collections into
- * @B the type of the elements of each traversable collection.
- * @asTraversable an implicit conversion which asserts that the element type of this
+ * @tparam B the type of the elements of each traversable collection.
+ * @param asTraversable an implicit conversion which asserts that the element type of this
* $coll is a `Traversable`.
* @return a two-dimensional $coll of ${coll}s which has as ''n''th row
* the ''n''th column of this $coll.
diff --git a/src/library/scala/collection/generic/Growable.scala b/src/library/scala/collection/generic/Growable.scala
index 7950dee9de..80f933a901 100644
--- a/src/library/scala/collection/generic/Growable.scala
+++ b/src/library/scala/collection/generic/Growable.scala
@@ -16,7 +16,6 @@ package generic
* a `clear` method.
*
* @author Martin Odersky
- * @owner Martin Odersky
* @version 2.8
* @since 2.8
* @define coll growable collection
@@ -42,26 +41,15 @@ trait Growable[-A] {
*/
def +=(elem1: A, elem2: A, elems: A*): this.type = this += elem1 += elem2 ++= elems
- /** ${Add}s all elements produced by an iterator to this $coll.
+ /** ${Add}s all elements produced by a TraversableOnce to this $coll.
*
- * @param iter the iterator producing the elements to $add.
+ * @param xs the TraversableOnce producing the elements to $add.
* @return the $coll itself.
*/
- def ++=(iter: Iterator[A]): this.type = { iter foreach += ; this }
-
- /** ${Add}s all elements contained in a traversable collection to this $coll.
- *
- * @param elems the collection containing the elements to $add.
- * @return the $coll itself.
- */
- def ++=(elems: Traversable[A]): this.type = { elems foreach +=; this }
+ def ++=(xs: TraversableOnce[A]): this.type = { xs foreach += ; this }
/** Clears the $coll's contents. After this operation, the
* $coll is empty.
*/
def clear()
}
-
-
-
-
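
A short sketch of the consolidated `++=` (using `ListBuffer`, which is a `Growable`):

  import scala.collection.mutable.ListBuffer
  val buf = new ListBuffer[Int]
  buf ++= List(1, 2)        // a Traversable
  buf ++= Iterator(3, 4)    // an Iterator; both now go through the single TraversableOnce overload
  buf.toList                // List(1, 2, 3, 4)
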
diff --git a/src/library/scala/collection/generic/IterableForwarder.scala b/src/library/scala/collection/generic/IterableForwarder.scala
index 9cd1ccd843..f4aef2fcbb 100644
--- a/src/library/scala/collection/generic/IterableForwarder.scala
+++ b/src/library/scala/collection/generic/IterableForwarder.scala
@@ -22,7 +22,7 @@ import collection.mutable.Buffer
* <li><code>toString</code>, <code>hashCode</code>, <code>equals</code>,
* <code>stringPrefix</code></li>
* <li><code>newBuilder</code>, <code>view</code></li>
- * <li>all calls creating a new iterable objetc of the same kind</li>
+ * <li>all calls creating a new iterable object of the same kind</li>
* </ul>
* <p>
* The above methods are forwarded by subclass <a href="../IterableProxy.html"
@@ -41,6 +41,6 @@ trait IterableForwarder[+A] extends Iterable[A] with TraversableForwarder[A] {
// Iterable delegates
// Iterable methods could be printed by cat IterableLike.scala | sed -n '/trait Iterable/,$ p' | egrep '^ (override )?def'
- override def iterator = underlying.iterator
+ override def iterator: Iterator[A] = underlying.iterator
override def sameElements[B >: A](that: Iterable[B]): Boolean = underlying.sameElements(that)
}
diff --git a/src/library/scala/collection/generic/SeqForwarder.scala b/src/library/scala/collection/generic/SeqForwarder.scala
index 0ecdaf4566..e5dbe4b79d 100644
--- a/src/library/scala/collection/generic/SeqForwarder.scala
+++ b/src/library/scala/collection/generic/SeqForwarder.scala
@@ -30,24 +30,31 @@ trait SeqForwarder[+A] extends Seq[A] with IterableForwarder[A] {
protected override def underlying: Seq[A]
- // PartialFunction delegates
-
- override def apply(i: Int): A = underlying.apply(i)
- override def isDefinedAt(x: Int): Boolean = underlying.isDefinedAt(x)
-
- // Seq delegates
- // Seq methods could be printed by cat SeqLike.scala | sed -n '/trait Seq/,$ p' | egrep '^ (override )?def'
-
override def length: Int = underlying.length
- override def lengthCompare(l: Int) = underlying lengthCompare l
+ override def apply(idx: Int): A = underlying.apply(idx)
+ override def lengthCompare(len: Int): Int = underlying.lengthCompare(len)
+ override def isDefinedAt(x: Int): Boolean = underlying.isDefinedAt(x)
override def segmentLength(p: A => Boolean, from: Int): Int = underlying.segmentLength(p, from)
override def prefixLength(p: A => Boolean) = underlying.prefixLength(p)
+ override def indexWhere(p: A => Boolean): Int = underlying.indexWhere(p)
override def indexWhere(p: A => Boolean, from: Int): Int = underlying.indexWhere(p, from)
+ override def findIndexOf(p: A => Boolean): Int = underlying.indexWhere(p)
+ override def indexOf[B >: A](elem: B): Int = underlying.indexOf(elem)
override def indexOf[B >: A](elem: B, from: Int): Int = underlying.indexOf(elem, from)
+ override def lastIndexOf[B >: A](elem: B): Int = underlying.lastIndexOf(elem)
+ override def lastIndexOf[B >: A](elem: B, end: Int): Int = underlying.lastIndexOf(elem, end)
+ override def lastIndexWhere(p: A => Boolean): Int = underlying.lastIndexWhere(p)
+ override def lastIndexWhere(p: A => Boolean, end: Int): Int = underlying.lastIndexWhere(p, end)
override def reverseIterator: Iterator[A] = underlying.reverseIterator
override def startsWith[B](that: Seq[B], offset: Int): Boolean = underlying.startsWith(that, offset)
+ override def startsWith[B](that: Seq[B]): Boolean = underlying.startsWith(that)
override def endsWith[B](that: Seq[B]): Boolean = underlying.endsWith(that)
override def indexOfSlice[B >: A](that: Seq[B]): Int = underlying.indexOfSlice(that)
+ override def indexOfSlice[B >: A](that: Seq[B], from: Int): Int = underlying.indexOfSlice(that, from)
+ override def lastIndexOfSlice[B >: A](that: Seq[B]): Int = underlying.lastIndexOfSlice(that)
+ override def lastIndexOfSlice[B >: A](that: Seq[B], end: Int): Int = underlying.lastIndexOfSlice(that, end)
+ override def containsSlice[B](that: Seq[B]): Boolean = underlying.containsSlice(that)
override def contains(elem: Any): Boolean = underlying.contains(elem)
+ override def corresponds[B](that: Seq[B])(p: (A,B) => Boolean): Boolean = underlying.corresponds(that)(p)
override def indices: Range = underlying.indices
}
diff --git a/src/library/scala/collection/generic/Shrinkable.scala b/src/library/scala/collection/generic/Shrinkable.scala
index bd773a97f9..cf970e1232 100644
--- a/src/library/scala/collection/generic/Shrinkable.scala
+++ b/src/library/scala/collection/generic/Shrinkable.scala
@@ -15,7 +15,6 @@ package generic
* using a `-=` operator.
*
* @author Martin Odersky
- * @owner Martin Odersky
* @version 2.8
* @since 2.8
* @define coll shrinkable collection
@@ -48,14 +47,7 @@ trait Shrinkable[-A] {
* @param iter the iterator producing the elements to remove.
* @return the $coll itself
*/
- def --=(iter: Iterator[A]): this.type = { iter foreach -=; this }
-
- /** Removes all elements contained in a traversable collection from this $coll.
- *
- * @param iter the collection containing the elements to remove.
- * @return the $coll itself
- */
- def --=(iter: Traversable[A]): this.type = { iter foreach -=; this }
+ def --=(xs: TraversableOnce[A]): this.type = { xs foreach -= ; this }
}
diff --git a/src/library/scala/collection/generic/Sorted.scala b/src/library/scala/collection/generic/Sorted.scala
index 73bc1b6553..aa95c76a88 100644
--- a/src/library/scala/collection/generic/Sorted.scala
+++ b/src/library/scala/collection/generic/Sorted.scala
@@ -16,8 +16,8 @@ package generic
* @author Sean McDirmid
* @since 2.8
*/
-trait Sorted[K, +This <: Sorted[K, This]]{
- def ordering : Ordering[K];
+trait Sorted[K, +This <: Sorted[K, This]] {
+ def ordering : Ordering[K]
/** The current collection */
protected def repr: This
@@ -25,7 +25,6 @@ trait Sorted[K, +This <: Sorted[K, This]]{
/** return as a projection the set of keys in this collection */
def keySet: SortedSet[K]
-
/** Returns the first key of the collection. */
def firstKey: K
@@ -68,24 +67,25 @@ trait Sorted[K, +This <: Sorted[K, This]]{
*/
def range(from: K, until: K): This = rangeImpl(Some(from), Some(until))
-
/** Create a range projection of this collection with no lower-bound.
* @param to The upper-bound (inclusive) of the ranged projection.
*/
def to(to: K): This = {
// tough!
- val i = keySet.from(to).iterator;
- if (!i.hasNext) return repr
- val next = i.next;
- if (next == to) {
- if (!i.hasNext) return repr
- else return until(i.next)
- } else return until(next)
+ val i = keySet.from(to).iterator
+ if (i.isEmpty) return repr
+ val next = i.next
+ if (next == to)
+ if (i.isEmpty) repr
+ else until(i.next)
+ else
+ until(next)
}
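
A brief sketch of the inclusive/exclusive bounds implemented above (assuming an immutable `SortedSet`):

  import scala.collection.immutable.SortedSet
  val s = SortedSet(1, 3, 5, 7)
  s to 5        // contains 1, 3, 5: inclusive upper bound, built via keySet.from and until
  s until 5     // contains 1, 3:    exclusive upper bound
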
protected def hasAll(j: Iterator[K]): Boolean = {
- val i = keySet.iterator;
- if (!i.hasNext) return !j.hasNext;
+ val i = keySet.iterator
+ if (i.isEmpty) return j.isEmpty
+
var in = i.next;
while (j.hasNext) {
val jn = j.next;
@@ -99,5 +99,4 @@ trait Sorted[K, +This <: Sorted[K, This]]{
}
true
}
-
}
diff --git a/src/library/scala/collection/generic/Subtractable.scala b/src/library/scala/collection/generic/Subtractable.scala
index 8ded9c22d1..b2051d2773 100644
--- a/src/library/scala/collection/generic/Subtractable.scala
+++ b/src/library/scala/collection/generic/Subtractable.scala
@@ -56,14 +56,5 @@ trait Subtractable[A, +Repr <: Subtractable[A, Repr]] { self =>
* @return a new $coll that contains all elements of the current $coll
* except one less occurrence of each of the elements of `elems`.
*/
- def --(elems: Traversable[A]): Repr = (repr /: elems) (_ - _)
-
- /** Creates a new $coll from this $coll by removing all elements produced
- * by an iterator.
- *
- * @param iter the iterator producing the removed elements.
- * @return a new $coll that contains all elements of the current $coll
- * except one less occurrence of each of the elements produced by `iter`.
- */
- def --(iter: Iterator[A]): Repr = (repr /: iter) (_ - _)
+ def --(xs: TraversableOnce[A]): Repr = (repr /: xs) (_ - _)
}
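
A sketch of the unified `--` (immutable `Set` is a `Subtractable`):

  Set(1, 2, 3, 4) -- List(1, 2)      // Set(3, 4)
  Set(1, 2, 3, 4) -- Iterator(3)     // Set(1, 2, 4), iterators are accepted by the same overload
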
diff --git a/src/library/scala/collection/generic/TraversableFactory.scala b/src/library/scala/collection/generic/TraversableFactory.scala
index 4f2eb40a64..c2668b48a2 100644
--- a/src/library/scala/collection/generic/TraversableFactory.scala
+++ b/src/library/scala/collection/generic/TraversableFactory.scala
@@ -38,7 +38,7 @@ abstract class TraversableFactory[CC[X] <: Traversable[X] with GenericTraversabl
extends GenericCompanion[CC] {
/** A generic implementation of the `CanBuildFrom` trait, which forwards
- * all calls to `apply(from)` to the `genericBuilder` methof of
+ * all calls to `apply(from)` to the `genericBuilder` method of
* $coll `from`, and which forwards all calls of `apply()` to the
* `newBuilder` method of this factory.
*/
diff --git a/src/library/scala/collection/generic/TraversableForwarder.scala b/src/library/scala/collection/generic/TraversableForwarder.scala
index bd7f751288..dcba86f3d7 100644
--- a/src/library/scala/collection/generic/TraversableForwarder.scala
+++ b/src/library/scala/collection/generic/TraversableForwarder.scala
@@ -42,33 +42,47 @@ trait TraversableForwarder[+A] extends Traversable[A] {
/** The iterable object to which calls are forwarded */
protected def underlying: Traversable[A]
- // Iterable delegates
- // Iterable methods could be printed by cat TarversableLike.scala | sed -n '/trait Iterable/,$ p' | egrep '^ (override )?def'
-
- override def isEmpty = underlying.isEmpty
- override def nonEmpty = underlying.nonEmpty
+ override def foreach[B](f: A => B): Unit = underlying.foreach(f)
+ override def isEmpty: Boolean = underlying.isEmpty
+ override def nonEmpty: Boolean = underlying.nonEmpty
+ override def size: Int = underlying.size
override def hasDefiniteSize = underlying.hasDefiniteSize
- override def foreach[B](f: A => B) = underlying.foreach(f)
override def forall(p: A => Boolean): Boolean = underlying.forall(p)
override def exists(p: A => Boolean): Boolean = underlying.exists(p)
override def count(p: A => Boolean): Int = underlying.count(p)
override def find(p: A => Boolean): Option[A] = underlying.find(p)
override def foldLeft[B](z: B)(op: (B, A) => B): B = underlying.foldLeft(z)(op)
+ override def /: [B](z: B)(op: (B, A) => B): B = underlying./:(z)(op)
override def foldRight[B](z: B)(op: (A, B) => B): B = underlying.foldRight(z)(op)
+ override def :\ [B](z: B)(op: (A, B) => B): B = underlying.:\(z)(op)
override def reduceLeft[B >: A](op: (B, A) => B): B = underlying.reduceLeft(op)
- override def reduceRight[B >: A](op: (A, B) => B): B = underlying.reduceRight(op)
override def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = underlying.reduceLeftOption(op)
+ override def reduceRight[B >: A](op: (A, B) => B): B = underlying.reduceRight(op)
override def reduceRightOption[B >: A](op: (A, B) => B): Option[B] = underlying.reduceRightOption(op)
+ override def sum[B >: A](implicit num: Numeric[B]): B = underlying.sum(num)
+ override def product[B >: A](implicit num: Numeric[B]): B = underlying.product(num)
+ override def min[B >: A](implicit cmp: Ordering[B]): A = underlying.min(cmp)
+ override def max[B >: A](implicit cmp: Ordering[B]): A = underlying.max(cmp)
+ override def head: A = underlying.head
+ override def headOption: Option[A] = underlying.headOption
+ override def last: A = underlying.last
+ override def lastOption: Option[A] = underlying.lastOption
override def copyToBuffer[B >: A](dest: Buffer[B]) = underlying.copyToBuffer(dest)
override def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) = underlying.copyToArray(xs, start, len)
- override def toArray[B >: A : ClassManifest]: Array[B] = underlying.toArray
+ override def copyToArray[B >: A](xs: Array[B], start: Int) = underlying.copyToArray(xs, start)
+ override def copyToArray[B >: A](xs: Array[B]) = underlying.copyToArray(xs)
+ override def toArray[B >: A: ClassManifest]: Array[B] = underlying.toArray
override def toList: List[A] = underlying.toList
+ override def toIterable: Iterable[A] = underlying.toIterable
override def toSeq: Seq[A] = underlying.toSeq
+ override def toIndexedSeq[B >: A]: mutable.IndexedSeq[B] = underlying.toIndexedSeq
override def toStream: Stream[A] = underlying.toStream
+ override def toSet[B >: A]: immutable.Set[B] = underlying.toSet
+ override def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = underlying.toMap(ev)
override def mkString(start: String, sep: String, end: String): String = underlying.mkString(start, sep, end)
+ override def mkString(sep: String): String = underlying.mkString(sep)
+ override def mkString: String = underlying.mkString
override def addString(b: StringBuilder, start: String, sep: String, end: String): StringBuilder = underlying.addString(b, start, sep, end)
-
- override def head: A = underlying.head
- override def last: A = underlying.last
- override def lastOption: Option[A] = underlying.lastOption
+ override def addString(b: StringBuilder, sep: String): StringBuilder = underlying.addString(b, sep)
+ override def addString(b: StringBuilder): StringBuilder = underlying.addString(b)
}
diff --git a/src/library/scala/collection/generic/TraversableView.scala.1 b/src/library/scala/collection/generic/TraversableView.scala.1
deleted file mode 100644
index 3608de42be..0000000000
--- a/src/library/scala/collection/generic/TraversableView.scala.1
+++ /dev/null
@@ -1,152 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-package scalay.collection.generic
-
-import Math.MAX_INT
-import TraversableView.NoBuilder
-
-/** <p>
- * A base class for views of <code>Traversable</code>.
- * </p>
- * <p>
- * Every subclass has to implement the <code>foreach</code> method.
- * </p>
- *
- * @since 2.8
- */
-abstract class TraversableView[+A, +Coll <: Traversable[_]] extends Traversable[A] {
-self =>
-
- type This >: this.type <: TraversableView[A, Coll] { type This = self.This }
- protected val thisCollection: This = this
-
- protected[this] def newBuilder: Builder[A, This, This] =
- throw new UnsupportedOperationException(this+".newBuilder")
-
- def force[B >: A, That](implicit b: Builder[B, That, Coll]) = {
- b ++= this
- b.result()
- }
-
- trait Transformed[+B] extends TraversableView[B, Coll]
-
- /** pre: from >= 0
- */
- trait Sliced extends Transformed[A] {
- protected[this] val from: Int
- protected[this] val until: Int
- override def foreach(f: A => Unit) {
- var index = 0
- for (x <- self) {
- if (from <= index) {
- if (until <= index) return
- f(x)
- }
- index += 1
- }
- }
- override def stringPrefix = self.stringPrefix+"S"
- override def slice(from1: Int, until1: Int) =
- newSliced(from + (from1 max 0), from + (until1 max 0)).asInstanceOf[This]
- }
-
- trait Mapped[B] extends Transformed[B] {
- protected[this] val mapping: A => B
- override def foreach(f: B => Unit) {
- for (x <- self)
- f(mapping(x))
- }
- override def stringPrefix = self.stringPrefix+"M"
- }
-
- trait FlatMapped[B] extends Transformed[B] {
- protected[this] val mapping: A => Traversable[B]
- override def foreach(f: B => Unit) {
- for (x <- self)
- for (y <- mapping(x))
- f(y)
- }
- override def stringPrefix = self.stringPrefix+"N"
- }
-
- trait Appended[B >: A] extends Transformed[B] {
- protected[this] val rest: Traversable[B]
- override def foreach(f: B => Unit) {
- for (x <- self) f(x)
- for (x <- rest) f(x)
- }
- override def stringPrefix = self.stringPrefix+"A"
- }
-
- trait Filtered extends Transformed[A] {
- protected[this] val pred: A => Boolean
- override def foreach(f: A => Unit) {
- for (x <- self)
- if (pred(x)) f(x)
- }
- override def stringPrefix = self.stringPrefix+"F"
- }
-
- trait TakenWhile extends Transformed[A] {
- protected[this] val pred: A => Boolean
- override def foreach(f: A => Unit) {
- for (x <- self) {
- if (!pred(x)) return
- f(x)
- }
- }
- override def stringPrefix = self.stringPrefix+"T"
- }
-
- trait DroppedWhile extends Transformed[A] {
- protected[this] val pred: A => Boolean
- override def foreach(f: A => Unit) {
- var go = false
- for (x <- self) {
- if (!go && !pred(x)) go = true
- if (go) f(x)
- }
- }
- override def stringPrefix = self.stringPrefix+"D"
- }
-
- override def ++[B >: A, That](that: Traversable[B])(implicit b: Builder[B, That, This]): That =
- if (b.isInstanceOf[NoBuilder[_]]) newAppended(that).asInstanceOf[That]
- else super.++[B, That](that)(b)
-
- override def ++[B >: A, That](that: Iterator[B])(implicit b: Builder[B, That, This]): That = ++[B, That](that.toStream)
-
- override def map[B, That](f: A => B)(implicit b: Builder[B, That, This]): That =
- if (b.isInstanceOf[NoBuilder[_]]) newMapped(f).asInstanceOf[That]
- else super.map[B, That](f)(b)
-
- override def flatMap[B, That](f: A => Traversable[B])(implicit b: Builder[B, That, This]): That =
- if (b.isInstanceOf[NoBuilder[_]]) newFlatMapped(f).asInstanceOf[That]
- else super.flatMap[B, That](f)(b)
-
- override def filter(p: A => Boolean): This = newFiltered(p).asInstanceOf[This]
- override def init: This = newSliced(0, size - 1).asInstanceOf[This]
- override def drop(n: Int): This = newSliced(n max 0, MAX_INT).asInstanceOf[This]
- override def take(n: Int): This = newSliced(0, n).asInstanceOf[This]
- override def slice(from: Int, until: Int): This = newSliced(from max 0, until).asInstanceOf[This]
- override def dropWhile(p: A => Boolean): This = newDroppedWhile(p).asInstanceOf[This]
- override def takeWhile(p: A => Boolean): This = newTakenWhile(p).asInstanceOf[This]
- override def span(p: A => Boolean): (This, This) = (takeWhile(p), dropWhile(p))
- override def splitAt(n: Int): (This, This) = (take(n), drop(n))
-}
-
-object TraversableView {
- class NoBuilder[A] extends Builder[A, Nothing, TraversableView[_, _]] {
- def +=(elem: A) {}
- def iterator: Iterator[A] = Iterator.empty
- @deprecated("use `iterator' instead") def elements = iterator
- def result() = throw new UnsupportedOperationException("TraversableView.Builder.result")
- def clear() {}
- }
- implicit def implicitBuilder[A]: Builder[A, TraversableView[A, Traversable[_]], TraversableView[_, _]] = new NoBuilder
-}
diff --git a/src/library/scala/collection/immutable/DefaultMap.scala b/src/library/scala/collection/immutable/DefaultMap.scala
new file mode 100755
index 0000000000..7ee8197150
--- /dev/null
+++ b/src/library/scala/collection/immutable/DefaultMap.scala
@@ -0,0 +1,53 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+// $Id: DefaultMap.scala 20028 2009-12-07 11:49:19Z cunei $
+
+
+package scala.collection
+package immutable
+
+import generic._
+
+/** <p>
+ * A default map which implements the <code>updated</code> and <code>-</code>
+ * methods of maps.<br/>
+ * Instances that inherit from <code>DefaultMap[A, B]</code> still have to
+ * define:
+ * </p><pre>
+ * <b>def</b> get(key: A): Option[B]
+ * <b>def</b> iterator: Iterator[(A, B)]</pre>
+ * <p>
+ * It refers back to the original map.
+ * </p>
+ * <p>
+ * It might also be advisable to override <code>foreach</code> or
+ * <code>size</code> if efficient implementations can be found.
+ * </p>
+ *
+ * @since 2.8
+ */
+trait DefaultMap[A, +B] extends Map[A, B] { self =>
+
+ /** A default implementation which creates a new immutable map.
+ */
+ override def +[B1 >: B](kv: (A, B1)): Map[A, B1] = {
+ val b = Map.newBuilder[A, B1]
+ b ++= this
+ b += ((kv._1, kv._2))
+ b.result
+ }
+
+ /** A default implementation which creates a new immutable map.
+ */
+ override def - (key: A): Map[A, B] = {
+ val b = newBuilder
+ b ++= this filter (key !=)
+ b.result
+ }
+}
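
A minimal sketch of a concrete `DefaultMap` (the class name and behaviour are illustrative, not from this patch): only `get` and `iterator` are written by hand, while `+` and `-` fall back to the defaults above.

  import scala.collection.immutable.DefaultMap

  class LengthMap(keys: List[String]) extends DefaultMap[String, Int] {
    def get(key: String) = if (keys contains key) Some(key.length) else None
    def iterator = keys.iterator map (k => (k, k.length))
  }
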
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 2215e22f71..e0f801546c 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -16,184 +16,383 @@ import generic._
import annotation.unchecked.uncheckedVariance
/** <p>
- * This class implements immutable maps using a hash table.
- * </p>
- * <p>
- * It is optimized for sequential accesses where the last updated table is
- * accessed most often. It supports with reasonable efficiency accesses to
- * previous versions of the table by keeping a change log that's regularly
- * compacted. It needs to synchronize most methods, so it is less suitable
- * for highly concurrent accesses.
+ * This class implements immutable maps using a hash trie.
* </p>
*
* @note the builder of a hash map returns specialized representations EmptyMap,Map1,..., Map4
* for maps of size <= 4.
*
* @author Martin Odersky
- * @version 2.0, 19/01/2007
+ * @author Tiark Rompf
+ * @version 2.8
* @since 2.3
*/
-@serializable @SerialVersionUID(1L)
-class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] with mutable.HashTable[A] {
+@serializable @SerialVersionUID(2L)
+class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] {
- type Entry = scala.collection.mutable.DefaultEntry[A, Any]
-
- @transient protected var later: HashMap[A, B @uncheckedVariance] = null
- @transient protected var oldKey: A = _
- @transient protected var oldValue: Option[B @uncheckedVariance] = _
- @transient protected var deltaSize: Int = _
+ override def size: Int = 0
override def empty = HashMap.empty[A, B]
- def get(key: A): Option[B] = synchronized {
- var m: HashMap[A, _ >: B] = this
- var cnt = 0
- while (m.later != null) {
- if (key == m.oldKey) return m.oldValue.asInstanceOf[Option[B]]
- cnt += 1
- m = m.later
- }
- if (cnt > logLimit) makeCopy(m)
- val e = m.findEntry(key)
- if (e == null) None
- else Some(getValue(e))
- }
+ def iterator: Iterator[(A,B)] = Iterator.empty
- override def updated [B1 >: B] (key: A, value: B1): HashMap[A, B1] = synchronized {
- makeCopyIfUpdated()
- val e = findEntry(key)
- if (e == null) {
- markUpdated(key, None, 1)
- later.addEntry(new Entry(key, value))
- } else {
- markUpdated(key, Some(getValue(e)), 0)
- e.value = value
- }
- later.asInstanceOf[HashMap[A, B1]]
- }
+ override def foreach[U](f: ((A, B)) => U): Unit = { }
+
+ def get(key: A): Option[B] =
+ get0(key, computeHash(key), 0)
+
+ override def updated [B1 >: B] (key: A, value: B1): HashMap[A, B1] =
+ updated0(key, computeHash(key), 0, value, null)
+
+ override def + [B1 >: B] (kv: (A, B1)): HashMap[A, B1] =
+ updated0(kv._1, computeHash(kv._1), 0, kv._2, kv)
- /** Add a key/value pair to this map.
- * @param kv the key/value pair
- * @return A new map with the new binding added to this map
- */
- override def + [B1 >: B] (kv: (A, B1)): HashMap[A, B1] = updated(kv._1, kv._2)
-
- /** Adds two or more elements to this collection and returns
- * either the collection itself (if it is mutable), or a new collection
- * with the added elements.
- *
- * @param elem1 the first element to add.
- * @param elem2 the second element to add.
- * @param elems the remaining elements to add.
- */
override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): HashMap[A, B1] =
this + elem1 + elem2 ++ elems
+ // TODO: optimize (might be able to use mutable updates)
- def - (key: A): HashMap[A, B] = synchronized {
- makeCopyIfUpdated()
- val e = findEntry(key)
- if (e == null) this
- else {
- markUpdated(key, Some(getValue(e)), -1)
- later removeEntry key
- later.asInstanceOf[HashMap[A, B]]
- }
- }
+ def - (key: A): HashMap[A, B] =
+ removed0(key, computeHash(key), 0)
- override def size: Int = synchronized {
- var m: HashMap[A, _ >: B] = this
- var cnt = 0
- var s = 0
- while (m.later != null) {
- s -= m.deltaSize
- cnt += 1
- m = m.later
- }
- s += m.tableSize
- if (cnt > logLimit) makeCopy(m)
- s
- }
+ protected def elemHashCode(key: A) = if (key == null) 0 else key.hashCode()
- def iterator = synchronized {
- makeCopyIfUpdated()
- entriesIterator map {e => (e.key, getValue(e))}
+ protected final def improve(hcode: Int) = {
+ var h: Int = hcode + ~(hcode << 9)
+ h = h ^ (h >>> 14)
+ h = h + (h << 4)
+ h ^ (h >>> 10)
}
- private def getValue(e: Entry) =
- e.value.asInstanceOf[B]
-
- private def logLimit: Int = math.sqrt(table.length).toInt
-
- private[this] def markUpdated(key: A, ov: Option[B], delta: Int) {
- val lf = loadFactor
- later = new HashMap[A, B] {
- override def initialSize = 0
- /* We need to do this to avoid a reference to the outer HashMap */
- def _newLoadFactor = lf
- override def loadFactor = _newLoadFactor
- table = HashMap.this.table
- tableSize = HashMap.this.tableSize
- threshold = HashMap.this.threshold
- }
- oldKey = key
- oldValue = ov
- deltaSize = delta
- }
+ protected def computeHash(key: A) = improve(elemHashCode(key))
- private def makeCopy(last: HashMap[A, _ >: B]) {
- def undo(m: HashMap[A, _ >: B]) {
- if (m ne last) {
- undo(m.later)
- if (m.deltaSize == 1) removeEntry(m.oldKey)
- else if (m.deltaSize == 0) findEntry(m.oldKey).value = m.oldValue.get
- else if (m.deltaSize == -1) addEntry(new Entry(m.oldKey, m.oldValue.get))
- }
- }
- def copy(e: Entry): Entry =
- if (e == null) null
- else {
- val rest = copy(e.next)
- val result = new Entry(e.key, e.value)
- result.next = rest
- result
- }
- val ltable = last.table
- val s = ltable.length
- table = new scala.Array[collection.mutable.HashEntry[A, Entry]](s)
- var i = 0
- while (i < s) {
- table(i) = copy(ltable(i).asInstanceOf[Entry])
- i += 1
- }
- tableSize = last.tableSize
- threshold = last.threshold
- undo(this)
- later = null
- }
+ protected def get0(key: A, hash: Int, level: Int): Option[B] = None
- private def makeCopyIfUpdated() {
- var m: HashMap[A, _ >: B] = this
- while (m.later != null) m = m.later
- if (m ne this) makeCopy(m)
- }
+ protected def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] =
+ new HashMap.HashMap1(key, hash, value, kv)
- private def writeObject(out: java.io.ObjectOutputStream) {
- serializeTo(out, _.value)
- }
- private def readObject(in: java.io.ObjectInputStream) {
- init[B](in, new Entry(_, _))
- }
+
+ protected def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = this
+
}
/** A factory object for immutable HashMaps.
*
* @author Martin Odersky
+ * @author Tiark Rompf
* @version 2.8
* @since 2.3
*/
object HashMap extends ImmutableMapFactory[HashMap] {
implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), HashMap[A, B]] = new MapCanBuildFrom[A, B]
- def empty[A, B]: HashMap[A, B] = new HashMap
+ def empty[A, B]: HashMap[A, B] = EmptyHashMap.asInstanceOf[HashMap[A, B]]
+
+ private object EmptyHashMap extends HashMap[Any,Nothing] {
+
+ }
+
+ // TODO: add HashMap2, HashMap3, ...
+
+ class HashMap1[A,+B](private var key: A, private[HashMap] var hash: Int, private var value: (B @uncheckedVariance), private var kv: (A,B @uncheckedVariance)) extends HashMap[A,B] {
+ override def size = 1
+
+ override def get0(key: A, hash: Int, level: Int): Option[B] =
+ if (hash == this.hash && key == this.key) Some(value) else None
+
+ override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] =
+ if (hash == this.hash && key == this.key) new HashMap1(key, hash, value, kv)
+ else {
+ if (hash != this.hash) {
+ //new HashTrieMap[A,B1](level+5, this, new HashMap1(key, hash, value, kv))
+ val m = new HashTrieMap[A,B1](0,new Array[HashMap[A,B1]](0),0) // TODO: could save array alloc
+ m.updated0(this.key, this.hash, level, this.value, this.kv).updated0(key, hash, level, value, kv)
+ } else {
+ // 32-bit hash collision (rare, but not impossible)
+ // wrap this in a HashTrieMap if called with level == 0 (otherwise serialization won't work)
+ if (level == 0) {
+ val elems = new Array[HashMap[A,B1]](1)
+ elems(0) = new HashMapCollision1(hash, ListMap.empty.updated(this.key,this.value).updated(key,value))
+ new HashTrieMap[A,B1](1 << ((hash >>> level) & 0x1f), elems, 2)
+ } else {
+ new HashMapCollision1(hash, ListMap.empty.updated(this.key,this.value).updated(key,value))
+ }
+ }
+ }
+
+ override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] =
+ if (hash == this.hash && key == this.key) HashMap.empty[A,B] else this
+
+ override def iterator: Iterator[(A,B)] = Iterator(ensurePair)
+ override def foreach[U](f: ((A, B)) => U): Unit = f(ensurePair)
+ private[HashMap] def ensurePair: (A,B) = if (kv ne null) kv else { kv = (key, value); kv }
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ out.writeObject(key)
+ out.writeObject(value)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ key = in.readObject().asInstanceOf[A]
+ value = in.readObject().asInstanceOf[B]
+ hash = computeHash(key)
+ }
+
+ }
+
+ private class HashMapCollision1[A,+B](private[HashMap] var hash: Int, var kvs: ListMap[A,B @uncheckedVariance]) extends HashMap[A,B] {
+ override def size = kvs.size
+
+ override def get0(key: A, hash: Int, level: Int): Option[B] =
+ if (hash == this.hash) kvs.get(key) else None
+
+ override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] =
+ if (hash == this.hash) new HashMapCollision1(hash, kvs.updated(key, value))
+ else {
+ var m: HashMap[A,B1] = new HashTrieMap[A,B1](0,new Array[HashMap[A,B1]](0),0)
+ // might be able to save some ops here, but it doesn't seem to be worth it
+ for ((k,v) <- kvs)
+ m = m.updated0(k, this.hash, level, v, null)
+ m.updated0(key, hash, level, value, kv)
+ }
+
+ override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] =
+ if (hash == this.hash) {
+ val kvs1 = kvs - key
+ if (!kvs1.isEmpty)
+ new HashMapCollision1(hash, kvs1)
+ else
+ HashMap.empty[A,B]
+ } else this
+
+ override def iterator: Iterator[(A,B)] = kvs.iterator
+ override def foreach[U](f: ((A, B)) => U): Unit = kvs.foreach(f)
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ // this cannot work - reading things in might produce different
+ // hash codes and remove the collision. however this is never called
+ // because no references to this class are ever handed out to client code
+ // and HashTrieMap serialization takes care of the situation
+ error("cannot serialize an immutable.HashMap where all items have the same 32-bit hash code")
+ //out.writeObject(kvs)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ error("cannot deserialize an immutable.HashMap where all items have the same 32-bit hash code")
+ //kvs = in.readObject().asInstanceOf[ListMap[A,B]]
+ //hash = computeHash(kvs.)
+ }
+
+ }
+
+
+ class HashTrieMap[A,+B](private var bitmap: Int, private var elems: Array[HashMap[A,B @uncheckedVariance]],
+ private var size0: Int) extends HashMap[A,B] {
+/*
+ def this (level: Int, m1: HashMap1[A,B], m2: HashMap1[A,B]) = {
+ this(((m1.hash >>> level) & 0x1f) | ((m2.hash >>> level) & 0x1f), {
+ val idx1 = (m1.hash >>> level) & 0x1f
+ val idx2 = (m2.hash >>> level) & 0x1f
+ assert(idx1 != idx2, m1.hash + "==" + m2.hash + " at level " + level) // TODO
+ val elems = new Array[HashMap[A,B]](2)
+ if (idx1 < idx2) {
+ elems(0) = m1
+ elems(1) = m2
+ } else {
+ elems(0) = m2
+ elems(1) = m1
+ }
+ elems
+ }, 2)
+ }
+*/
+ override def size = size0
+
+ override def get0(key: A, hash: Int, level: Int): Option[B] = {
+ val index = (hash >>> level) & 0x1f
+ val mask = (1 << index)
+ if (bitmap == - 1) {
+ elems(index & 0x1f).get0(key, hash, level + 5)
+ } else if ((bitmap & mask) != 0) {
+ val offset = Integer.bitCount(bitmap & (mask-1))
+ // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site)
+ elems(offset).get0(key, hash, level + 5)
+ } else
+ None
+ }
+
+ override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] = {
+ val index = (hash >>> level) & 0x1f
+ val mask = (1 << index)
+ val offset = Integer.bitCount(bitmap & (mask-1))
+ if ((bitmap & mask) != 0) {
+ val elemsNew = new Array[HashMap[A,B1]](elems.length)
+ Array.copy(elems, 0, elemsNew, 0, elems.length)
+ val sub = elems(offset)
+ // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site)
+ val subNew = sub.updated0(key, hash, level + 5, value, kv)
+ elemsNew(offset) = subNew
+ new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size))
+ } else {
+ val elemsNew = new Array[HashMap[A,B1]](elems.length + 1)
+ Array.copy(elems, 0, elemsNew, 0, offset)
+ elemsNew(offset) = new HashMap1(key, hash, value, kv)
+ Array.copy(elems, offset, elemsNew, offset + 1, elems.length - offset)
+ val bitmapNew = bitmap | mask
+ new HashTrieMap(bitmapNew, elemsNew, size + 1)
+ }
+ }
+
+ override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = {
+ val index = (hash >>> level) & 0x1f
+ val mask = (1 << index)
+ val offset = Integer.bitCount(bitmap & (mask-1))
+ if (((bitmap >>> index) & 1) == 1) {
+ val elemsNew = new Array[HashMap[A,B]](elems.length)
+ Array.copy(elems, 0, elemsNew, 0, elems.length)
+ val sub = elems(offset)
+ // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site)
+ val subNew = sub.removed0(key, hash, level + 5)
+ elemsNew(offset) = subNew
+ // TODO: handle shrinking
+ val sizeNew = size + (subNew.size - sub.size)
+ if (sizeNew > 0)
+ new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size))
+ else
+ HashMap.empty[A,B]
+ } else {
+ this
+ }
+ }
+
+/*
+ override def iterator = { // TODO: optimize (use a stack to keep track of pos)
+
+ def iter(m: HashTrieMap[A,B], k: => Stream[(A,B)]): Stream[(A,B)] = {
+ def horiz(elems: Array[HashMap[A,B]], i: Int, k: => Stream[(A,B)]): Stream[(A,B)] = {
+ if (i < elems.length) {
+ elems(i) match {
+ case m: HashTrieMap[A,B] => iter(m, horiz(elems, i+1, k))
+ case m: HashMap1[A,B] => new Stream.Cons(m.ensurePair, horiz(elems, i+1, k))
+ }
+ } else k
+ }
+ horiz(m.elems, 0, k)
+ }
+ iter(this, Stream.empty).iterator
+ }
+*/
+
+
+ override def iterator = new Iterator[(A,B)] {
+ private[this] var depth = 0
+ private[this] var arrayStack = new Array[Array[HashMap[A,B]]](6)
+ private[this] var posStack = new Array[Int](6)
+
+ private[this] var arrayD = elems
+ private[this] var posD = 0
+
+ private[this] var subIter: Iterator[(A,B)] = null // to traverse collision nodes
+
+ def hasNext = (subIter ne null) || depth >= 0
+
+ def next: (A,B) = {
+ if (subIter ne null) {
+ val el = subIter.next
+ if (!subIter.hasNext)
+ subIter = null
+ el
+ } else
+ next0(arrayD, posD)
+ }
+
+ @scala.annotation.tailrec private[this] def next0(elems: Array[HashMap[A,B]], i: Int): (A,B) = {
+ if (i == elems.length-1) { // reached end of level, pop stack
+ depth -= 1
+ if (depth >= 0) {
+ arrayD = arrayStack(depth)
+ posD = posStack(depth)
+ arrayStack(depth) = null
+ } else {
+ arrayD = null
+ posD = 0
+ }
+ } else
+ posD += 1
+
+ elems(i) match {
+ case m: HashTrieMap[A,B] => // push current pos onto stack and descend
+ if (depth >= 0) {
+ arrayStack(depth) = arrayD
+ posStack(depth) = posD
+ }
+ depth += 1
+ arrayD = m.elems
+ posD = 0
+ next0(m.elems, 0)
+ case m: HashMap1[A,B] => m.ensurePair
+ case m =>
+ subIter = m.iterator
+ subIter.next
+ }
+ }
+ }
+
+/*
+
+import collection.immutable._
+def time(block: =>Unit) = { val t0 = System.nanoTime; block; println("elapsed: " + (System.nanoTime - t0)/1000000.0) }
+var mOld = OldHashMap.empty[Int,Int]
+var mNew = HashMap.empty[Int,Int]
+time { for (i <- 0 until 100000) mOld = mOld.updated(i,i) }
+time { for (i <- 0 until 100000) mOld = mOld.updated(i,i) }
+time { for (i <- 0 until 100000) mOld = mOld.updated(i,i) }
+time { for (i <- 0 until 100000) mNew = mNew.updated(i,i) }
+time { for (i <- 0 until 100000) mNew = mNew.updated(i,i) }
+time { for (i <- 0 until 100000) mNew = mNew.updated(i,i) }
+time { mOld.iterator.foreach( p => ()) }
+time { mOld.iterator.foreach( p => ()) }
+time { mOld.iterator.foreach( p => ()) }
+time { mNew.iterator.foreach( p => ()) }
+time { mNew.iterator.foreach( p => ()) }
+time { mNew.iterator.foreach( p => ()) }
+
+*/
+
+
+ override def foreach[U](f: ((A, B)) => U): Unit = {
+ var i = 0;
+ while (i < elems.length) {
+ elems(i).foreach(f)
+ i += 1
+ }
+ }
+
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ // no out.defaultWriteObject()
+ out.writeInt(size)
+ foreach { p =>
+ out.writeObject(p._1)
+ out.writeObject(p._2)
+ }
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ val size = in.readInt
+ var index = 0
+ var m = HashMap.empty[A,B]
+ while (index < size) {
+ // TODO: optimize (use unsafe mutable update)
+ m = m + ((in.readObject.asInstanceOf[A], in.readObject.asInstanceOf[B]))
+ index += 1
+ }
+ var tm = m.asInstanceOf[HashTrieMap[A,B]]
+ bitmap = tm.bitmap
+ elems = tm.elems
+ size0 = tm.size0
+ }
+
+ }
+
}
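
The new HashTrieMap nodes locate children with a 32-bit occupancy bitmap and a population count, as in the `get0`/`updated0` code above. The following standalone sketch (illustrative only, not part of the patch) shows the same indexing trick in isolation:

// Standalone sketch of the bitmap/popcount indexing used by HashTrieMap above.
object BitmapIndexDemo {
  def main(args: Array[String]): Unit = {
    var bitmap = 0                     // which of the 32 slots are occupied
    var elems  = new Array[String](0)  // children, stored densely

    def insert(index: Int, value: String): Unit = {
      val mask   = 1 << index
      val offset = Integer.bitCount(bitmap & (mask - 1)) // children below `index`
      val bigger = new Array[String](elems.length + 1)
      Array.copy(elems, 0, bigger, 0, offset)
      bigger(offset) = value
      Array.copy(elems, offset, bigger, offset + 1, elems.length - offset)
      elems = bigger
      bitmap |= mask
    }

    def lookup(index: Int): Option[String] = {
      val mask = 1 << index
      if ((bitmap & mask) == 0) None
      else Some(elems(Integer.bitCount(bitmap & (mask - 1))))
    }

    insert(17, "q"); insert(3, "a"); insert(30, "z")
    println(lookup(3))   // Some(a)
    println(lookup(17))  // Some(q)
    println(lookup(5))   // None
  }
}
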
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index 2320187be9..16d4473de1 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -13,148 +13,353 @@ package scala.collection
package immutable
import generic._
+import annotation.unchecked.uncheckedVariance
/** <p>
- * This class implements immutable sets using a hash table.
- * </p>
- * <p>
- * It is optimized for sequential accesses where the last updated table is
- * accessed most often. It supports with reasonable efficiency accesses to
- * previous versions of the table by keeping a change log that's regularly
- * compacted. It needs to synchronize most methods, so it is less suitable
- * for highly concurrent accesses.
+ * This class implements immutable sets using a hash trie.
* </p>
*
* @note the builder of a hash set returns specialized representations EmptySet,Set1,..., Set4
* for sets of size <= 4.
*
* @author Martin Odersky
+ * @author Tiark Rompf
* @version 2.8
* @since 2.3
*/
-@serializable @SerialVersionUID(1L)
+@serializable @SerialVersionUID(2L)
class HashSet[A] extends Set[A]
with GenericSetTemplate[A, HashSet]
- with SetLike[A, HashSet[A]]
- with mutable.FlatHashTable[A] {
+ with SetLike[A, HashSet[A]] {
override def companion: GenericCompanion[HashSet] = HashSet
- @transient protected var later: HashSet[A] = null
- @transient protected var changedElem: A = _
- @transient protected var deleted: Boolean = _
-
- def contains(elem: A): Boolean = synchronized {
- var m = this
- var cnt = 0
- while (m.later != null) {
- if (elem == m.changedElem) return m.deleted
- cnt += 1
- m = m.later
- }
- if (cnt > logLimit) makeCopy(m)
- m.containsEntry(elem)
- }
+ //class HashSet[A] extends Set[A] with SetLike[A, HashSet[A]] {
- def + (elem: A): HashSet[A] = synchronized {
- makeCopyIfUpdated()
- if (containsEntry(elem)) this
- else {
- markUpdated(elem, false)
- later addEntry elem
- later
- }
- }
+ override def size: Int = 0
- def - (elem: A): HashSet[A] = synchronized {
- makeCopyIfUpdated()
- if (!containsEntry(elem)) this
- else {
- markUpdated(elem, true)
- later removeEntry elem
- later
- }
- }
+ override def empty = HashSet.empty[A]
- override def size: Int = synchronized {
- var m = this
- var cnt = 0
- var s = 0
- while (m.later != null) {
- if (m.deleted) s += 1 else s -= 1
- cnt += 1
- m = m.later
- }
- s += m.tableSize
- if (cnt > logLimit) makeCopy(m)
- s
- }
+ def iterator: Iterator[A] = Iterator.empty
- override def iterator = synchronized {
- makeCopyIfUpdated()
- // note need to cache because (later versions of) set might be mutated while elements are traversed.
- val cached = new mutable.ArrayBuffer() ++= super.iterator
- cached.iterator
- }
+ override def foreach[U](f: A => U): Unit = { }
- private def logLimit: Int = math.sqrt(table.length).toInt
-
- private def markUpdated(elem: A, del: Boolean) {
- val lf = loadFactor
- later = new HashSet[A] {
- override def initialSize = 0
- /* We need to do this to avoid a reference to the outer HashMap */
- def _newLoadFactor = lf
- override def loadFactor = _newLoadFactor
- table = HashSet.this.table
- tableSize = HashSet.this.tableSize
- threshold = HashSet.this.threshold
- }
- changedElem = elem
- deleted = del
- }
+ def contains(e: A): Boolean = get0(e, computeHash(e), 0)
- private def makeCopy(last: HashSet[A]) {
- def undo(m: HashSet[A]) {
- if (m.deleted) addEntry(m.changedElem)
- else removeEntry(m.changedElem)
- }
- table = new scala.Array[AnyRef](last.table.length)
- scala.Array.copy(last.table, 0, table, 0, table.length)
- tableSize = last.tableSize
- threshold = last.threshold
-
- // we need to work from the end of the list but non-tail-recursion
- // potentially blows the stack, so instead we create a stack on the heap.
- // See ticket #408.
- val toUndo = new mutable.Stack[HashSet[A]]
- toUndo pushAll ((Iterator iterate this)(_.later) takeWhile (_ ne last))
- toUndo foreach undo
- later = null
- }
+ override def + (e: A): HashSet[A] = updated0(e, computeHash(e), 0)
- private def makeCopyIfUpdated() {
- var m = this
- while (m.later != null) m = m.later
- if (m ne this) makeCopy(m)
- }
+ override def + (elem1: A, elem2: A, elems: A*): HashSet[A] =
+ this + elem1 + elem2 ++ elems
+ // TODO: optimize (might be able to use mutable updates)
- private def writeObject(s: java.io.ObjectOutputStream) {
- serializeTo(s)
- }
+ def - (e: A): HashSet[A] =
+ removed0(e, computeHash(e), 0)
+
+ protected def elemHashCode(key: A) = if (key == null) 0 else key.hashCode()
- private def readObject(in: java.io.ObjectInputStream) {
- init(in, x => x)
+ protected final def improve(hcode: Int) = {
+ var h: Int = hcode + ~(hcode << 9)
+ h = h ^ (h >>> 14)
+ h = h + (h << 4)
+ h ^ (h >>> 10)
}
+
+ protected def computeHash(key: A) = improve(elemHashCode(key))
+
+ protected def get0(key: A, hash: Int, level: Int): Boolean = false
+
+ protected def updated0(key: A, hash: Int, level: Int): HashSet[A] =
+ new HashSet.HashSet1(key, hash)
+
+
+
+ protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = this
+
}
+/*
+object HashSet extends SetFactory[HashSet] {
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, HashSet[A]] = setCanBuildFrom[A]
+ override def empty[A]: HashSet[A] = new HashSet
+}
+*/
+
+
/** A factory object for immutable HashSets.
*
* @author Martin Odersky
+ * @author Tiark Rompf
* @version 2.8
* @since 2.3
*/
object HashSet extends SetFactory[HashSet] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, HashSet[A]] = setCanBuildFrom[A]
- override def empty[A]: HashSet[A] = new HashSet
+ override def empty[A]: HashSet[A] = EmptyHashSet.asInstanceOf[HashSet[A]]
+
+ private object EmptyHashSet extends HashSet[Any] {
+ }
+
+ // TODO: add HashSet2, HashSet3, ...
+
+ class HashSet1[A](private[HashSet] var key: A, private[HashSet] var hash: Int) extends HashSet[A] {
+ override def size = 1
+
+ override def get0(key: A, hash: Int, level: Int): Boolean =
+ (hash == this.hash && key == this.key)
+
+ override def updated0(key: A, hash: Int, level: Int): HashSet[A] =
+ if (hash == this.hash && key == this.key) this
+ else {
+ if (hash != this.hash) {
+ //new HashTrieSet[A](level+5, this, new HashSet1(key, hash))
+ val m = new HashTrieSet[A](0,new Array[HashSet[A]](0),0) // TODO: could save array alloc
+ m.updated0(this.key, this.hash, level).updated0(key, hash, level)
+ } else {
+ // 32-bit hash collision (rare, but not impossible)
+ // wrap this in a HashTrieSet if called with level == 0 (otherwise serialization won't work)
+ if (level == 0) {
+ val elems = new Array[HashSet[A]](1)
+ elems(0) = new HashSetCollision1(hash, ListSet.empty + this.key + key)
+ new HashTrieSet[A](1 << ((hash >>> level) & 0x1f), elems, 2)
+ } else {
+ new HashSetCollision1(hash, ListSet.empty + this.key + key)
+ }
+ }
+ }
+
+ override def removed0(key: A, hash: Int, level: Int): HashSet[A] =
+ if (hash == this.hash && key == this.key) HashSet.empty[A] else this
+
+ override def iterator: Iterator[A] = Iterator(key)
+ override def foreach[U](f: A => U): Unit = f(key)
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ out.writeObject(key)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ key = in.readObject().asInstanceOf[A]
+ hash = computeHash(key)
+ }
+
+ }
+
+ private class HashSetCollision1[A](private[HashSet] var hash: Int, var ks: ListSet[A]) extends HashSet[A] {
+ override def size = ks.size
+
+ override def get0(key: A, hash: Int, level: Int): Boolean =
+ if (hash == this.hash) ks.contains(key) else false
+
+ override def updated0(key: A, hash: Int, level: Int): HashSet[A] =
+ if (hash == this.hash) new HashSetCollision1(hash, ks + key)
+ else {
+ var m: HashSet[A] = new HashTrieSet[A](0,new Array[HashSet[A]](0),0)
+ // might be able to save some ops here, but it doesn't seem to be worth it
+ for (k <- ks)
+ m = m.updated0(k, this.hash, level)
+ m.updated0(key, hash, level)
+ }
+
+ override def removed0(key: A, hash: Int, level: Int): HashSet[A] =
+ if (hash == this.hash) {
+ val ks1 = ks - key
+ if (!ks1.isEmpty)
+ new HashSetCollision1(hash, ks1)
+ else
+ HashSet.empty[A]
+ } else this
+
+ override def iterator: Iterator[A] = ks.iterator
+ override def foreach[U](f: A => U): Unit = ks.foreach(f)
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ // this cannot work - reading things in might produce different
+ // hash codes and remove the collision. however this is never called
+ // because no references to this class are ever handed out to client code
+ // and HashTrieSet serialization takes care of the situation
+ error("cannot serialize an immutable.HashSet where all items have the same 32-bit hash code")
+ //out.writeObject(kvs)
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ error("cannot deserialize an immutable.HashSet where all items have the same 32-bit hash code")
+ //kvs = in.readObject().asInstanceOf[ListSet[A]]
+ //hash = computeHash(kvs.)
+ }
+
+ }
+
+
+ class HashTrieSet[A](private var bitmap: Int, private var elems: Array[HashSet[A]],
+ private var size0: Int) extends HashSet[A] {
+
+ override def size = size0
+
+ override def get0(key: A, hash: Int, level: Int): Boolean = {
+ val index = (hash >>> level) & 0x1f
+ val mask = (1 << index)
+ if (bitmap == - 1) {
+ elems(index & 0x1f).get0(key, hash, level + 5)
+ } else if ((bitmap & mask) != 0) {
+ val offset = Integer.bitCount(bitmap & (mask-1))
+ // TODO: might be worth checking if sub is HashTrieSet (-> monomorphic call site)
+ elems(offset).get0(key, hash, level + 5)
+ } else
+ false
+ }
+
+ override def updated0(key: A, hash: Int, level: Int): HashSet[A] = {
+ val index = (hash >>> level) & 0x1f
+ val mask = (1 << index)
+ val offset = Integer.bitCount(bitmap & (mask-1))
+ if ((bitmap & mask) != 0) {
+ val elemsNew = new Array[HashSet[A]](elems.length)
+ Array.copy(elems, 0, elemsNew, 0, elems.length)
+ val sub = elems(offset)
+ // TODO: might be worth checking if sub is HashTrieSet (-> monomorphic call site)
+ val subNew = sub.updated0(key, hash, level + 5)
+ elemsNew(offset) = subNew
+ new HashTrieSet(bitmap, elemsNew, size + (subNew.size - sub.size))
+ } else {
+ val elemsNew = new Array[HashSet[A]](elems.length + 1)
+ Array.copy(elems, 0, elemsNew, 0, offset)
+ elemsNew(offset) = new HashSet1(key, hash)
+ Array.copy(elems, offset, elemsNew, offset + 1, elems.length - offset)
+ val bitmapNew = bitmap | mask
+ new HashTrieSet(bitmapNew, elemsNew, size + 1)
+ }
+ }
+
+ override def removed0(key: A, hash: Int, level: Int): HashSet[A] = {
+ val index = (hash >>> level) & 0x1f
+ val mask = (1 << index)
+ val offset = Integer.bitCount(bitmap & (mask-1))
+ if (((bitmap >>> index) & 1) == 1) {
+ val elemsNew = new Array[HashSet[A]](elems.length)
+ Array.copy(elems, 0, elemsNew, 0, elems.length)
+ val sub = elems(offset)
+ // TODO: might be worth checking if sub is HashTrieSet (-> monomorphic call site)
+ val subNew = sub.removed0(key, hash, level + 5)
+ elemsNew(offset) = subNew
+ // TODO: handle shrinking
+ val sizeNew = size + (subNew.size - sub.size)
+ if (sizeNew > 0)
+ new HashTrieSet(bitmap, elemsNew, size + (subNew.size - sub.size))
+ else
+ HashSet.empty[A]
+ } else {
+ this
+ }
+ }
+
+ override def iterator = new Iterator[A] {
+ private[this] var depth = 0
+ private[this] var arrayStack = new Array[Array[HashSet[A]]](6)
+ private[this] var posStack = new Array[Int](6)
+
+ private[this] var arrayD = elems
+ private[this] var posD = 0
+
+ private[this] var subIter: Iterator[A] = null // to traverse collision nodes
+
+ def hasNext = (subIter ne null) || depth >= 0
+
+ def next: A = {
+ if (subIter ne null) {
+ val el = subIter.next
+ if (!subIter.hasNext)
+ subIter = null
+ el
+ } else
+ next0(arrayD, posD)
+ }
+
+ @scala.annotation.tailrec private[this] def next0(elems: Array[HashSet[A]], i: Int): A = {
+ if (i == elems.length-1) { // reached end of level, pop stack
+ depth -= 1
+ if (depth >= 0) {
+ arrayD = arrayStack(depth)
+ posD = posStack(depth)
+ arrayStack(depth) = null
+ } else {
+ arrayD = null
+ posD = 0
+ }
+ } else
+ posD += 1
+
+ elems(i) match {
+ case m: HashTrieSet[A] => // push current pos onto stack and descend
+ if (depth >= 0) {
+ arrayStack(depth) = arrayD
+ posStack(depth) = posD
+ }
+ depth += 1
+ arrayD = m.elems
+ posD = 0
+ next0(m.elems, 0)
+ case m: HashSet1[A] => m.key
+ case m =>
+ subIter = m.iterator
+ subIter.next
+ }
+ }
+ }
+
+/*
+
+import collection.immutable._
+def time(block: =>Unit) = { val t0 = System.nanoTime; block; println("elapsed: " + (System.nanoTime - t0)/1000000.0) }
+var mOld = OldHashSet.empty[Int]
+var mNew = HashSet.empty[Int]
+time { for (i <- 0 until 100000) mOld = mOld + i }
+time { for (i <- 0 until 100000) mOld = mOld + i }
+time { for (i <- 0 until 100000) mOld = mOld + i }
+time { for (i <- 0 until 100000) mNew = mNew + i }
+time { for (i <- 0 until 100000) mNew = mNew + i }
+time { for (i <- 0 until 100000) mNew = mNew + i }
+time { mOld.iterator.foreach( p => ()) }
+time { mOld.iterator.foreach( p => ()) }
+time { mOld.iterator.foreach( p => ()) }
+time { mNew.iterator.foreach( p => ()) }
+time { mNew.iterator.foreach( p => ()) }
+time { mNew.iterator.foreach( p => ()) }
+
+*/
+
+
+ override def foreach[U](f: A => U): Unit = {
+ var i = 0;
+ while (i < elems.length) {
+ elems(i).foreach(f)
+ i += 1
+ }
+ }
+
+
+ private def writeObject(out: java.io.ObjectOutputStream) {
+ // no out.defaultWriteObject()
+ out.writeInt(size)
+ foreach { e =>
+ out.writeObject(e)
+ }
+ }
+
+ private def readObject(in: java.io.ObjectInputStream) {
+ val size = in.readInt
+ var index = 0
+ var m = HashSet.empty[A]
+ while (index < size) {
+ // TODO: optimize (use unsafe mutable update)
+ m = m + in.readObject.asInstanceOf[A]
+ index += 1
+ }
+ var tm = m.asInstanceOf[HashTrieSet[A]]
+ bitmap = tm.bitmap
+ elems = tm.elems
+ size0 = tm.size0
+ }
+
+ }
+
}
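
Unlike the removed change-log implementation, which kept a change log over a shared table and synchronized most methods, the trie-based sets are persistent: every update returns a new set and older versions remain valid. A small usage sketch (not part of the patch):

object PersistentSetDemo {
  def main(args: Array[String]): Unit = {
    val s0 = scala.collection.immutable.HashSet(1, 2, 3)
    val s1 = s0 + 4
    val s2 = s1 - 2
    println(s0 contains 2) // true: the original version is untouched
    println(s1 contains 4) // true
    println(s2 contains 2) // false
  }
}
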
diff --git a/src/library/scala/collection/immutable/IndexedSeq.scala b/src/library/scala/collection/immutable/IndexedSeq.scala
index 0d7b1b0d23..3f29052808 100644
--- a/src/library/scala/collection/immutable/IndexedSeq.scala
+++ b/src/library/scala/collection/immutable/IndexedSeq.scala
@@ -14,8 +14,9 @@ package immutable
import generic._
import mutable.{ArrayBuffer, Builder}
-/** A subtrait of <code>collection.IndexedSeq</code> which represents sequences
+/** A subtrait of <code>collection.IndexedSeq</code> which represents indexed sequences
* that cannot be mutated.
+ * $indexedSeqInfo
*
* @since 2.8
*/
@@ -36,5 +37,5 @@ object IndexedSeq extends SeqFactory[IndexedSeq] {
def apply(idx: Int) = buf.apply(idx)
}
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = new GenericCanBuildFrom[A]
- def newBuilder[A]: Builder[A, IndexedSeq[A]] = new ArrayBuffer[A] mapResult (buf => new Impl(buf))
-} \ No newline at end of file
+ def newBuilder[A]: Builder[A, IndexedSeq[A]] = Vector.newBuilder[A]
+}
diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala
index 52451e6012..62309a9f48 100644
--- a/src/library/scala/collection/immutable/IntMap.scala
+++ b/src/library/scala/collection/immutable/IntMap.scala
@@ -151,7 +151,10 @@ import IntMap._
* <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Integer Maps</a>
 * by Okasaki and Gill. Essentially a trie based on binary digits of the integers.
*
+ * Note: This class is as of 2.8 largely superseded by HashMap.
+ *
* @since 2.7
+ *
*/
sealed abstract class IntMap[+T] extends Map[Int, T] with MapLike[Int, T, IntMap[T]] {
override def empty: IntMap[T] = IntMap.Nil;
@@ -357,7 +360,7 @@ sealed abstract class IntMap[+T] extends Map[Int, T] with MapLike[Int, T, IntMap
}
/**
- * Forms the intersection of these two maps with a combinining function. The resulting map is
+ * Forms the intersection of these two maps with a combining function. The resulting map is
* a map that has only keys present in both maps and has values produced from the original mappings
* by combining them with f.
*
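
The combining intersection documented in the corrected comment can be exercised as follows (sketch only, assuming the existing `IntMap.intersectionWith(that, f)` signature):

import scala.collection.immutable.IntMap

object IntMapIntersectionDemo {
  def main(args: Array[String]): Unit = {
    val a = IntMap(1 -> 10, 2 -> 20, 3 -> 30)
    val b = IntMap(2 -> 200, 3 -> 300, 4 -> 400)
    // keep only keys present in both maps, combining the two values
    val c = a.intersectionWith(b, (key: Int, va: Int, vb: Int) => va + vb)
    println(c) // IntMap(2 -> 220, 3 -> 330)
  }
}
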
diff --git a/src/library/scala/collection/immutable/LinearSeq.scala b/src/library/scala/collection/immutable/LinearSeq.scala
index c1efea037c..016afd4508 100644
--- a/src/library/scala/collection/immutable/LinearSeq.scala
+++ b/src/library/scala/collection/immutable/LinearSeq.scala
@@ -17,7 +17,7 @@ import mutable.Builder
/** A subtrait of <code>collection.LinearSeq</code> which represents sequences
* that cannot be mutated.
- *
+ * $linearSeqInfo
* @since 2.8
*/
trait LinearSeq[+A] extends Seq[A]
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 2088f3ac78..2b91ab8852 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -46,7 +46,7 @@ import annotation.tailrec
sealed abstract class List[+A] extends LinearSeq[A]
with Product
with GenericTraversableTemplate[A, List]
- with LinearSeqLike[A, List[A]] {
+ with LinearSeqOptimized[A, List[A]] {
override def companion: GenericCompanion[List] = List
import scala.collection.{Iterable, Traversable, Seq, IndexedSeq}
@@ -61,7 +61,7 @@ sealed abstract class List[+A] extends LinearSeq[A]
* @param x the element to prepend.
* @return a list which contains `x` as first element and
* which continues with this list.
- * @ex `1 :: List(2, 3) = List(2, 3).::(1) = List(1, 2, 3)`
+ * @example `1 :: List(2, 3) = List(2, 3).::(1) = List(1, 2, 3)`
* @usecase def ::(x: A): List[A]
*/
def ::[B >: A] (x: B): List[B] =
@@ -71,7 +71,7 @@ sealed abstract class List[+A] extends LinearSeq[A]
* @param prefix The list elements to prepend.
* @return a list resulting from the concatenation of the given
* list `prefix` and this list.
- * @ex `List(1, 2) ::: List(3, 4) = List(3, 4).:::(List(1, 2)) = List(1, 2, 3, 4)`
+ * @example `List(1, 2) ::: List(3, 4) = List(3, 4).:::(List(1, 2)) = List(1, 2, 3, 4)`
* @usecase def :::(prefix: List[A]): List[A]
*/
def :::[B >: A](prefix: List[B]): List[B] =
@@ -133,16 +133,18 @@ sealed abstract class List[+A] extends LinearSeq[A]
loop(this)
}
- // Overridden methods from IterableLike or overloaded variants of such methods
+ // Overridden methods from IterableLike and SeqLike or overloaded variants of such methods
- override def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
+ override def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
val b = bf(this)
if (b.isInstanceOf[ListBuffer[_]]) (this ::: that.toList).asInstanceOf[That]
else super.++(that)
}
- override def ++[B >: A, That](that: Iterator[B])(implicit bf: CanBuildFrom[List[A], B, That]): That =
- this ++ that.toList
+ override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[List[A], B, That]): That = bf match {
+ case _: List.GenericCanBuildFrom[_] => (elem :: this).asInstanceOf[That]
+ case _ => super.+:(elem)(bf)
+ }
override def toList: List[A] = this
@@ -288,6 +290,9 @@ sealed abstract class List[+A] extends LinearSeq[A]
b.toList
}
+ @deprecated("use `distinct' instead")
+ def removeDuplicates: List[A] = distinct
+
/** <p>
* Sort the list according to the comparison function
* `lt(e1: a, e2: a) =&gt; Boolean`,
@@ -299,7 +304,7 @@ sealed abstract class List[+A] extends LinearSeq[A]
* @param lt the comparison function
* @return a list sorted according to the comparison function
* `lt(e1: a, e2: a) =&gt; Boolean`.
- * @ex <pre>
+ * @example <pre>
* List("Steve", "Tom", "John", "Bob")
* .sort((e1, e2) => (e1 compareTo e2) &lt; 0) =
* List("Bob", "John", "Steve", "Tom")</pre>
@@ -383,7 +388,7 @@ case object Nil extends List[Nothing] {
throw new NoSuchElementException("head of empty list")
override def tail: List[Nothing] =
throw new UnsupportedOperationException("tail of empty list")
- // Removal of equals method here might lead to an infinite recusion similar to IntMap.equals.
+ // Removal of equals method here might lead to an infinite recursion similar to IntMap.equals.
override def equals(that: Any) = that match {
case that1: Seq[_] => that1.isEmpty
case _ => false
@@ -539,7 +544,7 @@ object List extends SeqFactory[List] {
* Returns the `Left` values in the given `Iterable`
* of `Either`s.
*/
- @deprecated("use `xs partialMap { case Left(x: A) => x }' instead of `List.lefts(xs)'")
+ @deprecated("use `xs collect { case Left(x: A) => x }' instead of `List.lefts(xs)'")
def lefts[A, B](es: Iterable[Either[A, B]]) =
es.foldRight[List[A]](Nil)((e, as) => e match {
case Left(a) => a :: as
@@ -549,7 +554,7 @@ object List extends SeqFactory[List] {
/**
* Returns the `Right` values in the given`Iterable` of `Either`s.
*/
- @deprecated("use `xs partialMap { case Right(x: B) => x }' instead of `List.rights(xs)'")
+ @deprecated("use `xs collect { case Right(x: B) => x }' instead of `List.rights(xs)'")
def rights[A, B](es: Iterable[Either[A, B]]) =
es.foldRight[List[B]](Nil)((e, bs) => e match {
case Left(_) => bs
@@ -561,9 +566,9 @@ object List extends SeqFactory[List] {
* @param xs the iterable of Eithers to separate
* @return a pair of lists.
*/
- @deprecated("use `Either.separate' instead")
+ @deprecated("use `(for (Left(x) <- es) yield x, for (Right(x) <- es) yield x)` instead")
def separate[A,B](es: Iterable[Either[A, B]]): (List[A], List[B]) =
- es.foldRight[(List[A], List[B])]((Nil, Nil)) {
+ es.foldRight[(List[A], List[B])]((Nil, Nil)) {
case (Left(a), (lefts, rights)) => (a :: lefts, rights)
case (Right(b), (lefts, rights)) => (lefts, b :: rights)
}
@@ -590,7 +595,7 @@ object List extends SeqFactory[List] {
*
* @param arr the array to convert
* @param start the first index to consider
- * @param len the lenght of the range to convert
+ * @param len the length of the range to convert
 * @return a list that contains the same elements as `arr`
* in the same order
*/
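
The replacement suggested by the updated deprecation messages for `List.lefts`/`List.rights` looks like this in practice (small sketch, not part of the patch):

object EitherCollectDemo {
  def main(args: Array[String]): Unit = {
    val xs: List[Either[String, Int]] = List(Left("a"), Right(1), Left("b"), Right(2))
    val lefts  = xs collect { case Left(s)  => s } // List(a, b)
    val rights = xs collect { case Right(i) => i } // List(1, 2)
    println(lefts)
    println(rights)
  }
}
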
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index 0f66a1f452..d8e3e0856b 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -30,7 +30,7 @@ object ListMap extends ImmutableMapFactory[ListMap] {
* directly, or by applying the function <code>ListMap.empty</code>.
*
* @author Matthias Zenger
- * @author Martin Oderskty
+ * @author Martin Odersky
* @version 2.0, 01/01/2007
* @since 1
*/
diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala
index e527712475..0d74e41cec 100644
--- a/src/library/scala/collection/immutable/LongMap.scala
+++ b/src/library/scala/collection/immutable/LongMap.scala
@@ -138,6 +138,8 @@ import LongMap._;
* <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Long Maps</a>
 * by Okasaki and Gill. Essentially a trie based on binary digits of the integers.
*
+ * Note: This class is as of 2.8 largely superseded by HashMap.
+ *
* @since 2.7
*/
sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, LongMap[T]] {
@@ -344,7 +346,7 @@ sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, Lon
}
/**
- * Forms the intersection of these two maps with a combinining function. The resulting map is
+ * Forms the intersection of these two maps with a combining function. The resulting map is
* a map that has only keys present in both maps and has values produced from the original mappings
* by combining them with f.
*
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index f42794d09e..b5a852683a 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -44,7 +44,7 @@ trait Map[A, +B] extends Iterable[(A, B)]
object Map extends ImmutableMapFactory[Map] {
implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), Map[A, B]] = new MapCanBuildFrom[A, B]
- def empty[A, B]: Map[A, B] = new EmptyMap[A, B]
+ def empty[A, B]: Map[A, B] = EmptyMap.asInstanceOf[Map[A, B]]
class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends Map[A, B] {
override def size = underlying.size
@@ -58,12 +58,22 @@ object Map extends ImmutableMapFactory[Map] {
}
@serializable
- class EmptyMap[A, +B] extends Map[A, B] {
+ private object EmptyMap extends Map[Any, Nothing] {
+ override def size: Int = 0
+ def get(key: Any): Option[Nothing] = None
+ def iterator: Iterator[(Any, Nothing)] = Iterator.empty
+ override def updated [B1] (key: Any, value: B1): Map[Any, B1] = new Map1(key, value)
+ def + [B1](kv: (Any, B1)): Map[Any, B1] = updated(kv._1, kv._2)
+ def - (key: Any): Map[Any, Nothing] = this
+ }
+
+ @serializable @deprecated("use `Map.empty' instead")
+ class EmptyMap[A,B] extends Map[A,B] {
override def size: Int = 0
def get(key: A): Option[B] = None
def iterator: Iterator[(A, B)] = Iterator.empty
- override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] = new Map1(key, value)
- def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2)
+ override def updated [B1] (key: A, value: B1): Map[A, B1] = new Map1(key, value)
+ def + [B1](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2)
def - (key: A): Map[A, B] = this
}
@@ -78,7 +88,7 @@ object Map extends ImmutableMapFactory[Map] {
else new Map2(key1, value1, key, value)
def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2)
def - (key: A): Map[A, B] =
- if (key == key1) empty else this
+ if (key == key1) Map.empty else this
override def foreach[U](f: ((A, B)) => U): Unit = {
f((key1, value1))
}
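
A quick sketch (not part of the patch) of the small-map behaviour touched here: maps grow through the specialized `Map1`...`Map4` representations, and removing the last key now returns the shared `EmptyMap` instance via `Map.empty`:

object SmallMapDemo {
  def main(args: Array[String]): Unit = {
    val m1 = Map("a" -> 1)
    val m4 = m1 + ("b" -> 2) + ("c" -> 3) + ("d" -> 4)
    val m5 = m4 + ("e" -> 5)            // beyond four entries: no longer a Map4
    println((m4.size, m5.size))         // (4,5)
    println((m1 - "a") eq Map.empty)    // true: the shared empty map is returned
  }
}
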
diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala
index a06bce1038..662321bb0c 100644
--- a/src/library/scala/collection/immutable/MapLike.scala
+++ b/src/library/scala/collection/immutable/MapLike.scala
@@ -41,8 +41,9 @@ import generic._
* @version 2.8
* @since 2.8
*/
-trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]] extends scala.collection.MapLike[A, B, This] {
-self =>
+trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]]
+ extends scala.collection.MapLike[A, B, This]
+{ self =>
import scala.collection.Traversable
@@ -74,16 +75,36 @@ self =>
*
* @param elems the traversable object.
*/
- override def ++[B1 >: B](elems: Traversable[(A, B1)]): immutable.Map[A, B1] =
- ((repr: immutable.Map[A, B1]) /: elems) (_ + _)
+ override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): immutable.Map[A, B1] =
+ ((repr: immutable.Map[A, B1]) /: xs) (_ + _)
- /** Adds a number of elements provided by an iterator
- * and returns a new collection with the added elements.
- *
- * @param iter the iterator
+ /** Filters this map by retaining only keys satisfying a predicate.
+ * @param p the predicate used to test keys
+ * @return an immutable map consisting only of those key value pairs of this map where the key satisfies
+ * the predicate `p`. The resulting map wraps the original map without copying any elements.
*/
- override def ++[B1 >: B] (iter: Iterator[(A, B1)]): immutable.Map[A, B1] =
- ((repr: immutable.Map[A, B1]) /: iter) (_ + _)
+ override def filterKeys(p: A => Boolean): Map[A, B] = new DefaultMap[A, B] {
+ override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
+ def iterator = self.iterator.filter(kv => p(kv._1))
+ override def contains(key: A) = self.contains(key) && p(key)
+ def get(key: A) = if (!p(key)) None else self.get(key)
+ }
+
+ /** A map view resulting from applying a given function `f` to each value
+ * associated with a key in this map.
+ * @param f the function used to transform values of this map.
+ * @return an immutable map which maps every key of this map
+ * to `f(this(key))`. The resulting map wraps the original map without copying any elements.
+ */
+ override def mapValues[C](f: B => C): Map[A, C] = new DefaultMap[A, C] {
+ override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v)))
+ def iterator = for ((k, v) <- self.iterator) yield (k, f(v))
+ override def size = self.size
+ override def contains(key: A) = self.contains(key)
+ def get(key: A) = self.get(key).map(f)
+ }
/** This function transforms all the values of mappings contained
* in this map with function <code>f</code>.
@@ -97,23 +118,6 @@ self =>
b.result
}
- /** Returns a new map with all key/value pairs for which the predicate
- * <code>p</code> returns <code>true</code>.
- *
- * @param p A predicate over key-value pairs
- * @note This method works by successively removing elements fro which the
- * predicate is false from this set.
- * If removal is slow, or you expect that most elements of the set$
- * will be removed, you might consider using <code>filter</code>
- * with a negated predicate instead.
- */
- override def filterNot(p: ((A, B)) => Boolean): This = {
- var res: This = repr
- for (kv <- this)
- if (p(kv)) res = (res - kv._1).asInstanceOf[This] // !!! concrete overrides abstract problem
- res
- }
-
@deprecated("use `updated' instead")
def update[B1 >: B](key: A, value: B1): immutable.Map[A, B1] = updated(key, value).asInstanceOf[immutable.Map[A, B1]]
}
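
The view-like behaviour documented for the new `filterKeys` and `mapValues` overrides can be seen in a short sketch (not part of the patch); both wrappers read through to the original map instead of copying it:

object MapViewDemo {
  def main(args: Array[String]): Unit = {
    val prices = Map("apple" -> 100, "plum" -> 50, "fig" -> 200)

    val cheap   = prices filterKeys (_ != "fig")   // wrapper, evaluated on access
    val doubled = prices mapValues (_ * 2)         // wrapper, evaluated on access

    println(cheap.get("apple"))   // Some(100)
    println(cheap.get("fig"))     // None
    println(doubled("plum"))      // 100
  }
}
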
diff --git a/src/library/scala/collection/immutable/MapProxy.scala b/src/library/scala/collection/immutable/MapProxy.scala
index 0ef7aa620a..371af042e7 100644
--- a/src/library/scala/collection/immutable/MapProxy.scala
+++ b/src/library/scala/collection/immutable/MapProxy.scala
@@ -37,5 +37,9 @@ trait MapProxy[A, +B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]]
override def + [B1 >: B](kv: (A, B1)): Map[A, B1] = newProxy(self + kv)
override def + [B1 >: B](elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *) =
newProxy(self.+(elem1, elem2, elems: _*))
+
override def -(key: A) = newProxy(self - key)
+
+ override def filterKeys(p: A => Boolean) = self.filterKeys(p)
+ override def mapValues[C](f: B => C) = self.mapValues(f)
}
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index 5514f7a24d..d3e4558884 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -34,11 +34,18 @@ import generic._
* @version 2.8
*/
@serializable
-abstract class NumericRange[+T]
+abstract class NumericRange[T]
(val start: T, val end: T, val step: T, val isInclusive: Boolean)
(implicit num: Integral[T])
extends IndexedSeq[T]
{
+ /** Note that NumericRange must be invariant so that constructs
+ * such as
+ *
+ * 1L to 10 by 5
+ *
+ * do not infer the range type as AnyVal.
+ */
import num._
private def fail(msg: String) = throw new IllegalArgumentException(msg)
@@ -56,20 +63,18 @@ extends IndexedSeq[T]
// inclusive/exclusiveness captured this way because we do not have any
// concept of a "unit", we can't just add an epsilon to an exclusive
// endpoint to make it inclusive (as can be done with the int-based Range.)
- protected def limitTest[U >: T](x: U)(implicit unum: Integral[U]) =
- !isEmpty && isInclusive && unum.equiv(x, end)
+ protected def limitTest(x: T) = !isEmpty && isInclusive && equiv(x, end)
protected def underlying = collection.immutable.IndexedSeq.empty[T]
/** Create a new range with the start and end values of this range and
* a new <code>step</code>.
*/
- def by[U >: T](newStep: U)(implicit unum: Integral[U]): NumericRange[U] =
- copy(start, end, newStep)
+ def by(newStep: T): NumericRange[T] = copy(start, end, newStep)
/** Create a copy of this range.
*/
- def copy[U >: T](start: U, end: U, step: U)(implicit unum: Integral[U]): NumericRange[U]
+ def copy(start: T, end: T, step: T): NumericRange[T]
override def foreach[U](f: T => U) {
var i = start
@@ -115,9 +120,8 @@ extends IndexedSeq[T]
}
// a well-typed contains method.
- def containsTyped[U >: T](x: U)(implicit unum: Integral[U]): Boolean = {
- import unum._
- def divides(d: U, by: U) = equiv(d % by, zero)
+ def containsTyped(x: T): Boolean = {
+ def divides(d: T, by: T) = equiv(d % by, zero)
limitTest(x) || (
if (step > zero)
@@ -154,7 +158,7 @@ extends IndexedSeq[T]
// XXX This may be incomplete.
new NumericRange[A](fm(start), fm(end), fm(step), isInclusive) {
- def copy[A1 >: A](start: A1, end: A1, step: A1)(implicit unum: Integral[A1]): NumericRange[A1] =
+ def copy(start: A, end: A, step: A): NumericRange[A] =
if (isInclusive) NumericRange.inclusive(start, end, step)
else NumericRange(start, end, step)
@@ -162,8 +166,7 @@ extends IndexedSeq[T]
override def foreach[U](f: A => U) { underlyingRange foreach (x => f(fm(x))) }
override def isEmpty = underlyingRange.isEmpty
override def apply(idx: Int): A = fm(underlyingRange(idx))
- override def containsTyped[A1 >: A](el: A1)(implicit unum: Integral[A1]) =
- underlyingRange exists (x => fm(x) == el)
+ override def containsTyped(el: A) = underlyingRange exists (x => fm(x) == el)
}
}
@@ -200,7 +203,7 @@ extends IndexedSeq[T]
object NumericRange {
class Inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T])
extends NumericRange(start, end, step, true) {
- def copy[U >: T](start: U, end: U, step: U)(implicit unum: Integral[U]): Inclusive[U] =
+ def copy(start: T, end: T, step: T): Inclusive[T] =
NumericRange.inclusive(start, end, step)
def exclusive: Exclusive[T] = NumericRange(start, end, step)
@@ -208,7 +211,7 @@ object NumericRange {
class Exclusive[T](start: T, end: T, step: T)(implicit num: Integral[T])
extends NumericRange(start, end, step, false) {
- def copy[U >: T](start: U, end: U, step: U)(implicit unum: Integral[U]): Exclusive[U] =
+ def copy(start: T, end: T, step: T): Exclusive[T] =
NumericRange(start, end, step)
def inclusive: Inclusive[T] = NumericRange.inclusive(start, end, step)
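
The invariance note added to NumericRange can be illustrated with a small sketch (not part of the patch); with `by(newStep: T)` the mixed-literal expression from the comment keeps its Long element type instead of widening to AnyVal:

object NumericRangeDemo {
  def main(args: Array[String]): Unit = {
    val r = 1L to 10 by 5   // NumericRange[Long] with elements 1 and 6
    println(r.toList)       // List(1, 6)
  }
}
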
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index bde8d67ffe..bd12502520 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -202,7 +202,7 @@ private class Page[T: ClassManifest](val num: Int) {
/** The next page in the sequence */
var next : Page[T] = null
- /** A later page in the sequence, serves a cachae for pointing to last page */
+ /** A later page in the sequence, serves as a cache for pointing to the last page */
var later : Page[T] = this
/** The number of characters read into this page */
@@ -218,11 +218,11 @@ private class Page[T: ClassManifest](val num: Int) {
/** The index of the first character in this page relative to the whole sequence */
final def start = num * PageSize
- /** The index of the character following the last charcater in this page relative
+ /** The index of the character following the last character in this page relative
* to the whole sequence */
final def end = start + filled
- /** The currently last page in the sequence; might change as more charcaters are appended */
+ /** The currently last page in the sequence; might change as more characters are appended */
final def latest: Page[T] = {
if (later.next != null) later = later.next.latest
later
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index 9957f90ab3..02d344ceea 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -12,12 +12,8 @@
package scala.collection
package immutable
-import scala.annotation.tailrec
-
-object Queue {
- val Empty: Queue[Nothing] = new Queue(Nil, Nil)
- def apply[A](elems: A*) = new Queue(Nil, elems.toList)
-}
+import generic._
+import mutable.{ Builder, ListBuffer }
/** <code>Queue</code> objects implement data structures that allow inserting
 * and retrieving elements in a first-in-first-out (FIFO) manner.
@@ -28,10 +24,13 @@ object Queue {
*/
@serializable
@SerialVersionUID(-7622936493364270175L)
-class Queue[+A] protected(
- protected val in: List[A],
- protected val out: List[A]) extends Seq[A]
-{
+class Queue[+A] protected(protected val in: List[A], protected val out: List[A])
+ extends Seq[A]
+ with GenericTraversableTemplate[A, Queue]
+ with SeqLike[A, Queue[A]] {
+
+ override def companion: GenericCompanion[Queue] = Queue
+
/** Returns the <code>n</code>-th element of this queue.
* The first element is at position 0.
*
@@ -127,3 +126,13 @@ class Queue[+A] protected(
*/
override def toString() = mkString("Queue(", ", ", ")")
}
+
+object Queue extends SeqFactory[Queue] {
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Queue[A]] = new GenericCanBuildFrom[A]
+ def newBuilder[A]: Builder[A, Queue[A]] = new ListBuffer[A] mapResult (x => new Queue[A](Nil, x.toList))
+ override def empty[A]: Queue[A] = new Queue[A](Nil, Nil)
+ override def apply[A](xs: A*): Queue[A] = new Queue[A](Nil, xs.toList)
+
+ @deprecated("Use Queue.empty instead")
+ val Empty: Queue[Nothing] = Queue()
+}
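
A small usage sketch (not part of the patch) of the queue together with its new `SeqFactory`-based companion:

import scala.collection.immutable.Queue

// Enqueueing returns a new queue; dequeue returns the front element
// together with the remaining queue.
object QueueDemo {
  def main(args: Array[String]): Unit = {
    val q0 = Queue(1, 2)         // via the new SeqFactory apply
    val q1 = q0.enqueue(3)
    val (front, rest) = q1.dequeue
    println(front)               // 1
    println(rest)                // Queue(2, 3)
  }
}
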
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 47a97664de..43b11b67be 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -39,25 +39,34 @@ class Range(val start: Int, val end: Int, val step: Int) extends IndexedSeq[Int]
def isInclusive = false
- protected def limit = end
-
override def foreach[U](f: Int => U) {
- var i = start
- while (if (step > 0) i < limit else i > limit) {
+ if (fullLength > 0) {
+ val last = this.last
+ var i = start
+ while (i != last) {
+ f(i)
+ i += step
+ }
f(i)
- i += step
}
}
- lazy val length: Int = {
- def plen(start: Int, limit: Int, step: Int) =
- if (limit <= start) 0 else (limit - start - 1) / step + 1
- if (step > 0) plen(start, limit, step)
- else plen(limit, start, -step)
+ override def last: Int = if (step == 1 || step == -1) {
+ end - step
+ } else {
+ val size = end.toLong - start.toLong
+ val inclusiveLast = (size / step.toLong * step.toLong + start.toLong).toInt
+ if (size % step == 0) inclusiveLast - step else inclusiveLast
}
- final override def isEmpty =
- if (step > 0) start >= limit else start <= limit
+ def length: Int = fullLength.toInt
+
+ protected def fullLength: Long = if (end > start == step > 0 && start != end)
+ ((last.toLong - start.toLong) / step.toLong + 1)
+ else
+ 0
+
+ final override def isEmpty = length == 0
@inline
final def apply(idx: Int): Int = {
@@ -66,12 +75,19 @@ class Range(val start: Int, val end: Int, val step: Int) extends IndexedSeq[Int]
}
// take and drop have to be tolerant of large values without overflowing
- private def locationAfterN(n: Int) = start + step * (0 max n min length)
+ private def locationAfterN(n: Int) = if (n > 0) {
+ if (step > 0)
+ ((start.toLong + step.toLong * n.toLong) min last.toLong).toInt
+ else
+ ((start.toLong + step.toLong * n.toLong) max last.toLong).toInt
+ } else {
+ start
+ }
- final override def take(n: Int): Range = {
- val limit1 = locationAfterN(n)
- if (step > 0) Range(start, limit1 min limit, step)
- else Range(start, limit1 max limit, step)
+ final override def take(n: Int): Range = if (n > 0 && length > 0) {
+ Range(start, locationAfterN(n - 1), step).inclusive
+ } else {
+ Range(start, start, step)
}
final override def drop(n: Int): Range =
@@ -85,7 +101,11 @@ class Range(val start: Int, val end: Int, val step: Int) extends IndexedSeq[Int]
private def skip(p: Int => Boolean): Int = {
var s = start
- while ((if (step > 0) s < limit else s > limit) && p(s)) s += step
+ if (length > 0) {
+ val last = this.last
+ while ((if (step > 0) s <= last else s >= last) && p(s))
+ s += step
+ }
s
}
@@ -103,16 +123,18 @@ class Range(val start: Int, val end: Int, val step: Int) extends IndexedSeq[Int]
final override def dropRight(n: Int): Range = take(length - n)
- final override def reverse: Range = new Range.Inclusive(last, start, -step)
+ final override def reverse: Range = if (length > 0) new Range.Inclusive(last, start, -step) else this
/** Make range inclusive.
- * @pre if (step > 0) end != MaxInt else end != MinInt
*/
def inclusive = new Range.Inclusive(start, end, step)
- def contains(x: Int): Boolean =
- if (step > 0) start <= x && x < limit && (x - start) % step == 0
- else start >= x && x > limit && (start - x) % step == 0
+ final def contains(x: Int): Boolean = if (length > 0) {
+ if (step > 0) start <= x && x <= last && (x - start) % step == 0
+ else start >= x && x >= last && (start - x) % step == 0
+ } else {
+ false
+ }
override def equals(other: Any) = other match {
case x: Range =>
@@ -139,37 +161,45 @@ object Range {
class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) {
override def isInclusive = true
- override protected val limit = end + math.signum(step)
override protected def copy(start: Int, end: Int, step: Int): Range = new Inclusive(start, end, step)
+ override def last: Int = if (step == 1 || step == -1)
+ end
+ else
+ ((end.toLong - start.toLong) / step.toLong * step.toLong + start.toLong).toInt
+ protected override def fullLength: Long = if (end > start == step > 0 || start == end)
+ ((last.toLong - start.toLong) / step.toLong + 1)
+ else
+ 0
}
- /** Make a range from `start` until `end` (exclusive) with step value 1.
+ /** Make a range from `start` until `end` (exclusive) with given step value.
+ * @note step != 0
*/
def apply(start: Int, end: Int, step: Int): Range = new Range(start, end, step)
/** Make an range from `start` to `end` inclusive with step value 1.
- * @pre end != MaxInt
*/
def apply(start: Int, end: Int): Range with ByOne = new Range(start, end, 1) with ByOne
/** Make an inclusive range from start to end with given step value.
- * @pre step != 0
- * @pre if (step > 0) end != MaxInt else end != MinInt
+ * @note step != 0
*/
def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Inclusive(start, end, step)
/** Make an inclusive range from start to end with step value 1.
- * @pre end != MaxInt
*/
def inclusive(start: Int, end: Int): Range.Inclusive with ByOne = new Inclusive(start, end, 1) with ByOne
trait ByOne extends Range {
override final def foreach[U](f: Int => U) {
- var i = start
- val l = limit
- while (i < l) {
+ if (length > 0) {
+ val last = this.last
+ var i = start
+ while (i != last) {
+ f(i)
+ i += 1
+ }
f(i)
- i += 1
}
}
}
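
The rewritten Range above derives `last` and the element count from `start`, `end`, and `step` using Long arithmetic, so ranges whose endpoints sit near Int.MaxValue no longer overflow. A standalone sketch of the same arithmetic for an exclusive range; the object and method names here are illustrative, not part of the library:

    object RangeMath {
      // last element of `start until end by step`, assuming the range is non-empty
      def exclusiveLast(start: Int, end: Int, step: Int): Int =
        if (step == 1 || step == -1) end - step
        else {
          val span = end.toLong - start.toLong
          val inclusiveLast = (span / step * step + start).toInt
          if (span % step == 0) inclusiveLast - step else inclusiveLast
        }

      // number of elements, computed in Long space to avoid Int overflow
      def length(start: Int, end: Int, step: Int): Long =
        if (end > start == step > 0 && start != end)
          (exclusiveLast(start, end, step).toLong - start) / step + 1
        else 0L
    }

    // e.g. RangeMath.exclusiveLast(0, 10, 3) == 9 and RangeMath.length(0, 10, 3) == 4
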
diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala
index dfb34552cd..e7b4f3c978 100644
--- a/src/library/scala/collection/immutable/RedBlack.scala
+++ b/src/library/scala/collection/immutable/RedBlack.scala
@@ -12,7 +12,8 @@
package scala.collection
package immutable
-/**
+/** A base class containing the implementations for TreeMaps and TreeSets
+ *
* @since 2.3
*/
@serializable @SerialVersionUID(8691885935445612921L)
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index be1e86bcdd..1bec1b9a48 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -41,12 +41,22 @@ trait Set[A] extends Iterable[A]
object Set extends SetFactory[Set] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Set[A]] = setCanBuildFrom[A]
- override def empty[A]: Set[A] = new EmptySet[A]
+ override def empty[A]: Set[A] = EmptySet.asInstanceOf[Set[A]]
private val hashSeed = "Set".hashCode
/** An optimized representation for immutable empty sets */
@serializable
+ private object EmptySet extends Set[Any] {
+ override def size: Int = 0
+ def contains(elem: Any): Boolean = false
+ def + (elem: Any): Set[Any] = new Set1(elem)
+ def - (elem: Any): Set[Any] = this
+ def iterator: Iterator[Any] = Iterator.empty
+ override def foreach[U](f: Any => U): Unit = {}
+ }
+
+ @serializable @deprecated("use `Set.empty' instead")
class EmptySet[A] extends Set[A] {
override def size: Int = 0
def contains(elem: A): Boolean = false
@@ -66,7 +76,7 @@ object Set extends SetFactory[Set] {
if (contains(elem)) this
else new Set2(elem1, elem)
def - (elem: A): Set[A] =
- if (elem == elem1) new EmptySet[A]
+ if (elem == elem1) Set.empty
else this
def iterator: Iterator[A] =
Iterator(elem1)
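
The change above replaces a fresh `new EmptySet[A]` per call with one shared `EmptySet` object cast to `Set[A]`; the cast is safe because an empty immutable set holds no values of its element type. A minimal sketch of the same singleton-empty pattern, with hypothetical names:

    sealed trait MySet[A] {
      def contains(a: A): Boolean
      def +(a: A): MySet[A]
    }

    object MySet {
      // one shared empty instance, typed at MySet[Any] and cast on demand
      private object Empty extends MySet[Any] {
        def contains(a: Any) = false
        def +(a: Any): MySet[Any] = NonEmpty(Set(a))
      }
      private case class NonEmpty(elems: Set[Any]) extends MySet[Any] {
        def contains(a: Any) = elems(a)
        def +(a: Any): MySet[Any] = NonEmpty(elems + a)
      }
      // the unchecked cast is safe: Empty contains no elements of A
      def empty[A]: MySet[A] = Empty.asInstanceOf[MySet[A]]
    }
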
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index 316cab9b50..919b529a49 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -31,6 +31,8 @@ trait SortedMap[A, +B] extends Map[A, B]
override protected[this] def newBuilder : Builder[(A, B), SortedMap[A, B]] =
SortedMap.newBuilder[A, B]
+ override def empty: SortedMap[A, B] = SortedMap.empty
+
override def updated [B1 >: B](key: A, value: B1): SortedMap[A, B1] = this + ((key, value))
/** Add a key/value pair to this map.
@@ -56,16 +58,8 @@ trait SortedMap[A, +B] extends Map[A, B]
*
* @param elems the traversable object.
*/
- override def ++[B1 >: B](elems: scala.collection.Traversable[(A, B1)]): SortedMap[A, B1] =
- ((repr: SortedMap[A, B1]) /: elems) (_ + _)
-
- /** Adds a number of elements provided by an iterator
- * and returns a new collection with the added elements.
- *
- * @param iter the iterator
- */
- override def ++[B1 >: B] (iter: Iterator[(A, B1)]): SortedMap[A, B1] =
- ((repr: SortedMap[A, B1]) /: iter) (_ + _)
+ override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): SortedMap[A, B1] =
+ ((repr: SortedMap[A, B1]) /: xs) (_ + _)
}
/**
diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala
index a5d7e9515a..640fb39af5 100644
--- a/src/library/scala/collection/immutable/Stack.scala
+++ b/src/library/scala/collection/immutable/Stack.scala
@@ -37,7 +37,7 @@ object Stack extends SeqFactory[Stack] {
@serializable @SerialVersionUID(1976480595012942526L)
class Stack[+A] protected (protected val elems: List[A]) extends LinearSeq[A]
with GenericTraversableTemplate[A, Stack]
- with LinearSeqLike[A, Stack[A]] {
+ with LinearSeqOptimized[A, Stack[A]] {
override def companion: GenericCompanion[Stack] = Stack
def this() = this(Nil)
@@ -74,18 +74,8 @@ class Stack[+A] protected (protected val elems: List[A]) extends LinearSeq[A]
* @param elems the iterator object.
* @return the stack with the new elements on top.
*/
- def pushAll[B >: A](elems: Iterator[B]): Stack[B] =
- ((this: Stack[B]) /: elems)(_ push _)
-
- /** Push all elements provided by the given traversable object onto
- * the stack. The last element returned by the iterable object
- * will be on top of the new stack.
- *
- * @param elems the iterable object.
- * @return the stack with the new elements on top.
- */
- def pushAll[B >: A](elems: scala.collection.Traversable[B]): Stack[B] =
- ((this: Stack[B]) /: elems)(_ push _)
+ def pushAll[B >: A](xs: TraversableOnce[B]): Stack[B] =
+ ((this: Stack[B]) /: xs.toIterator)(_ push _)
/** Returns the top element of the stack. An error is signaled if
* there is no element on the stack.
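
`pushAll` now accepts any `TraversableOnce[B]`, so one method serves both collections and iterators; it is simply a left fold of `push` over the elements. A brief usage sketch, assuming the 2.8 immutable `Stack` API shown above:

    import scala.collection.immutable.Stack

    val s0 = Stack(1, 2, 3)
    val s1 = s0.pushAll(List(4, 5))       // a Traversable source
    val s2 = s0.pushAll(Iterator(4, 5))   // an Iterator source, same method
    // the last element pushed ends up on top, so s1.top == 5
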
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 1377fbe59d..3b10963ddb 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -40,7 +40,7 @@ import scala.annotation.tailrec
*/
abstract class Stream[+A] extends LinearSeq[A]
with GenericTraversableTemplate[A, Stream]
- with LinearSeqLike[A, Stream[A]] {
+ with LinearSeqOptimized[A, Stream[A]] {
self =>
override def companion: GenericCompanion[Stream] = Stream
@@ -113,17 +113,21 @@ self =>
* then StreamBuilder will be chosen for the implicit.
* we recognize that fact and optimize to get more laziness.
*/
- override def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
+ override def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
// we assume there is no other builder factory on streams and therefore know that That = Stream[A]
(if (isEmpty) that.toStream
else new Stream.Cons(head, (tail ++ that).asInstanceOf[Stream[A]])).asInstanceOf[That]
}
- /** Create a new stream which contains all elements of this stream
- * followed by all elements of Iterator `that'
+ /**
+ * Create a new stream which contains all intermediate results of applying the operator
+ * to subsequent elements left to right.
+ * @note This works because the target type of the Builder That is a Stream.
*/
- override def++[B >: A, That](that: Iterator[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That =
- this ++ that.toStream
+ override final def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = {
+ (if (this.isEmpty) Stream(z)
+ else new Stream.Cons(z, tail.scanLeft(op(z, head))(op).asInstanceOf[Stream[B]])).asInstanceOf[That]
+ }
/** Returns the stream resulting from applying the given function
* <code>f</code> to each element of this stream.
@@ -344,9 +348,9 @@ self =>
/** Builds a new stream from this stream in which any duplicates (wrt to ==) removed.
* Among duplicate elements, only the first one is retained in the result stream
*/
- override def removeDuplicates: Stream[A] =
+ override def distinct: Stream[A] =
if (isEmpty) this
- else new Stream.Cons(head, tail.filter(head !=).removeDuplicates)
+ else new Stream.Cons(head, tail.filter(head !=).distinct)
/** Returns a new sequence of given length containing the elements of this sequence followed by zero
* or more occurrences of given elements.
@@ -420,12 +424,12 @@ object Stream extends SeqFactory[Stream] {
import scala.collection.{Iterable, Seq, IndexedSeq}
/** A builder for streams
- * @note: This builder is lazy only in the sense that it does not go downs the spine
- * of traversables that are added as a whole. If more laziness can be achieved,
- * this builder should be bypassed.
+ * @note This builder is lazy only in the sense that it does not go down the spine
+ * of traversables that are added as a whole. If more laziness can be achieved,
+ * this builder should be bypassed.
*/
class StreamBuilder[A] extends scala.collection.mutable.LazyBuilder[A, Stream[A]] {
- def result: Stream[A] = (for (xs <- parts.iterator; x <- xs.toIterable.iterator) yield x).toStream
+ def result: Stream[A] = parts.toStream flatMap (_.toStream)
}
object Empty extends Stream[Nothing] {
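
The new `scanLeft` override above keeps the head of the result strict but the tail lazy, so scanning an infinite stream terminates as long as only a finite prefix is forced. A short usage sketch assuming the 2.8 `Stream` API:

    // running sums of the natural numbers; the stream is infinite,
    // but only the first five partial sums are ever computed
    val sums = Stream.from(1).scanLeft(0)(_ + _)
    println(sums.take(5).toList)   // List(0, 1, 3, 6, 10)
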
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index 500de352f6..5b5a627cfe 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -34,7 +34,7 @@ import StringLike._
/**
* @since 2.8
*/
-trait StringLike[+Repr] extends IndexedSeqLike[Char, Repr] with Ordered[String] {
+trait StringLike[+Repr] extends IndexedSeqOptimized[Char, Repr] with Ordered[String] {
self =>
/** Creates a string builder buffer as builder for this class */
@@ -263,7 +263,7 @@ self =>
* @param args the arguments used to instantiating the pattern.
* @throws java.lang.IllegalArgumentException
*/
- def format(l: java.util.Locale, args: Any*): String =
+ def formatLocal(l: java.util.Locale, args: Any*): String =
java.lang.String.format(l, toString, args map unwrapArg: _*)
}
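
Renaming the locale-taking overload to `formatLocal` keeps the varargs `format(args: Any*)` unambiguous. A brief usage sketch assuming these `StringLike` methods:

    import java.util.Locale

    "%,d items".format(1234567)                      // "1,234,567 items" under a US-style default locale
    "%,d items".formatLocal(Locale.GERMANY, 1234567) // "1.234.567 items"
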
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index 1a3ed38e1c..79e1a6b00b 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -19,8 +19,7 @@ import mutable.{Builder, AddingBuilder}
*
* @since 1
*/
-object TreeSet extends SortedSetFactory[TreeSet]{
-
+object TreeSet extends SortedSetFactory[TreeSet] {
implicit def implicitBuilder[A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = newBuilder[A](ordering)
override def newBuilder[A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] =
new AddingBuilder(empty[A](ordering))
@@ -28,7 +27,6 @@ object TreeSet extends SortedSetFactory[TreeSet]{
/** The empty set of this type
*/
def empty[A](implicit ordering: Ordering[A]) = new TreeSet[A]
-
}
/** This class implements immutable sets using a tree.
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 1326768090..6defe66d6f 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -19,24 +19,25 @@ import scala.collection.mutable.Builder
object Vector extends SeqFactory[Vector] {
- /*private[immutable]*/ val BF = new GenericCanBuildFrom[Nothing] {
+ private[immutable] val BF = new GenericCanBuildFrom[Nothing] {
override def apply() = newBuilder[Nothing]
}
@inline implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] =
BF.asInstanceOf[CanBuildFrom[Coll, A, Vector[A]]]
def newBuilder[A]: Builder[A, Vector[A]] = new VectorBuilder[A]
- /*private[immutable]*/ val NIL = new Vector[Nothing](0, 0, 0)
+ private[immutable] val NIL = new Vector[Nothing](0, 0, 0)
@inline override def empty[A]: Vector[A] = NIL
}
-// TODO: most members are still public -> restrict access (caveat: private prevents inlining)
+// in principle, most members should be private. however, access privileges must
+// be carefully chosen to not prevent method inlining
@serializable
-final class Vector[+A](startIndex: Int, endIndex: Int, focus: Int) extends Seq[A]
+final class Vector[+A](startIndex: Int, endIndex: Int, focus: Int) extends IndexedSeq[A]
with GenericTraversableTemplate[A, Vector]
- with SeqLike[A, Vector[A]]
- with VectorPointer[A @uncheckedVariance] {
+ with IndexedSeqLike[A, Vector[A]]
+ with VectorPointer[A @uncheckedVariance] { self =>
override def companion: GenericCompanion[Vector] = Vector
@@ -45,7 +46,7 @@ override def companion: GenericCompanion[Vector] = Vector
//assert(focus >= 0, focus+"<0")
//assert(focus <= endIndex, focus+">"+endIndex)
- /*private*/ var dirty = false
+ private[immutable] var dirty = false
def length = endIndex - startIndex
@@ -60,20 +61,35 @@ override def companion: GenericCompanion[Vector] = Vector
s
}
+
+ // can still be improved
+ override /*SeqLike*/
+ def reverseIterator: Iterator[A] = new Iterator[A] {
+ private var i = self.length
+ def hasNext: Boolean = 0 < i
+ def next: A =
+ if (0 < i) {
+ i -= 1
+ self(i)
+ } else Iterator.empty.next
+ }
+
+ // TODO: reverse
+
// TODO: check performance of foreach/map etc. should override or not?
// Ideally, clients will inline calls to map all the way down, including the iterator/builder methods.
// In principle, escape analysis could even remove the iterator/builder allocations and do it
// with local variables exclusively. But we're not quite there yet ...
- @inline def foreach0[U](f: A => U): Unit = iterator.foreach0(f)
- @inline def map0[B, That](f: A => B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = {
+ @deprecated("this method is experimental and will be removed in a future release")
+ @inline def foreachFast[U](f: A => U): Unit = iterator.foreachFast(f)
+ @deprecated("this method is experimental and will be removed in a future release")
+ @inline def mapFast[B, That](f: A => B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = {
val b = bf(repr)
- foreach0(x => b += f(x))
+ foreachFast(x => b += f(x))
b.result
}
- // TODO: reverse
- // TODO: reverseIterator
def apply(index: Int): A = {
val idx = checkRangeConvert(index)
@@ -108,41 +124,71 @@ override def companion: GenericCompanion[Vector] = Vector
}
override def take(n: Int): Vector[A] = {
- if (n < 0) throw new IllegalArgumentException(n.toString)
- if (startIndex + n < endIndex) {
+ if (n <= 0)
+ Vector.empty
+ else if (startIndex + n < endIndex)
dropBack0(startIndex + n)
- } else
+ else
this
}
override def drop(n: Int): Vector[A] = {
- if (n < 0) throw new IllegalArgumentException(n.toString)
- if (startIndex + n < endIndex) {
+ if (n <= 0)
+ this
+ else if (startIndex + n < endIndex)
dropFront0(startIndex + n)
- } else
+ else
Vector.empty
}
override def takeRight(n: Int): Vector[A] = {
- if (n < 0) throw new IllegalArgumentException(n.toString)
- if (endIndex - n > startIndex) {
+ if (n <= 0)
+ Vector.empty
+ else if (endIndex - n > startIndex)
dropFront0(endIndex - n)
- } else
+ else
this
}
override def dropRight(n: Int): Vector[A] = {
- if (n < 0) throw new IllegalArgumentException(n.toString)
- if (endIndex - n > startIndex) {
+ if (n <= 0)
+ this
+ else if (endIndex - n > startIndex)
dropBack0(endIndex - n)
- } else
+ else
Vector.empty
}
+ override /*IterableLike*/ def head: A = {
+ if (isEmpty) throw new UnsupportedOperationException("empty.head")
+ apply(0)
+ }
+
+ override /*TraversableLike*/ def tail: Vector[A] = {
+ if (isEmpty) throw new UnsupportedOperationException("empty.tail")
+ drop(1)
+ }
+
+ override /*TraversableLike*/ def last: A = {
+ if (isEmpty) throw new UnsupportedOperationException("empty.last")
+ apply(length-1)
+ }
+
+ override /*TraversableLike*/ def init: Vector[A] = {
+ if (isEmpty) throw new UnsupportedOperationException("empty.init")
+ dropRight(1)
+ }
+
+ override /*IterableLike*/ def slice(from: Int, until: Int): Vector[A] =
+ take(until).drop(from)
+
+ override /*IterableLike*/ def splitAt(n: Int): (Vector[A], Vector[A]) = (take(n), drop(n))
+
+
// semi-private api
- def updateAt[B >: A](index: Int, elem: B): Vector[B] = {
+ private[immutable] def updateAt[B >: A](index: Int, elem: B): Vector[B] = {
val idx = checkRangeConvert(index)
val s = new Vector[B](startIndex, endIndex, idx)
s.initFrom(this)
@@ -153,7 +199,6 @@ override def companion: GenericCompanion[Vector] = Vector
}
-
private def gotoPosWritable(oldIndex: Int, newIndex: Int, xor: Int) = if (dirty) {
gotoPosWritable1(oldIndex, newIndex, xor)
} else {
@@ -168,7 +213,7 @@ override def companion: GenericCompanion[Vector] = Vector
dirty = true
}
- def appendFront[B>:A](value: B): Vector[B] = {
+ private[immutable] def appendFront[B>:A](value: B): Vector[B] = {
if (endIndex != startIndex) {
var blockIndex = (startIndex - 1) & ~31
var lo = (startIndex - 1) & 31
@@ -263,7 +308,7 @@ override def companion: GenericCompanion[Vector] = Vector
}
}
- def appendBack[B>:A](value: B): Vector[B] = {
+ private[immutable] def appendBack[B>:A](value: B): Vector[B] = {
// //println("------- append " + value)
// debug()
if (endIndex != startIndex) {
@@ -361,22 +406,22 @@ override def companion: GenericCompanion[Vector] = Vector
display5 = copyRange(display5, oldLeft, newLeft)
}
- def zeroLeft(array: Array[AnyRef], index: Int): Unit = {
+ private def zeroLeft(array: Array[AnyRef], index: Int): Unit = {
var i = 0; while (i < index) { array(i) = null; i+=1 }
}
- def zeroRight(array: Array[AnyRef], index: Int): Unit = {
+ private def zeroRight(array: Array[AnyRef], index: Int): Unit = {
var i = index; while (i < array.length) { array(i) = null; i+=1 }
}
- def copyLeft(array: Array[AnyRef], right: Int): Array[AnyRef] = {
+ private def copyLeft(array: Array[AnyRef], right: Int): Array[AnyRef] = {
// if (array eq null)
// println("OUCH!!! " + right + "/" + depth + "/"+startIndex + "/" + endIndex + "/" + focus)
val a2 = new Array[AnyRef](array.length)
Platform.arraycopy(array, 0, a2, 0, right)
a2
}
- def copyRight(array: Array[AnyRef], left: Int): Array[AnyRef] = {
+ private def copyRight(array: Array[AnyRef], left: Int): Array[AnyRef] = {
val a2 = new Array[AnyRef](array.length)
Platform.arraycopy(array, left, a2, left, a2.length - left)
a2
@@ -592,18 +637,17 @@ final class VectorIterator[+A](_startIndex: Int, _endIndex: Int) extends Iterato
res
}
- // TODO: take
- // TODO: drop
+ // TODO: drop (important?)
- // TODO: remove!
- @inline def foreach0[U](f: A => U) { while (hasNext) f(next()) }
+ @deprecated("this method is experimental and will be removed in a future release")
+ @inline def foreachFast[U](f: A => U) { while (hasNext) f(next()) }
}
final class VectorBuilder[A]() extends Builder[A,Vector[A]] with VectorPointer[A @uncheckedVariance] {
- // TODO: possible alternative: start with display0 = null, blockIndex = -32, lo = 32
- // to avoid allocation initial array if the result will be empty anyways
+ // possible alternative: start with display0 = null, blockIndex = -32, lo = 32
+ // to avoid allocating initial array if the result will be empty anyways
display0 = new Array[AnyRef](32)
depth = 1
@@ -612,7 +656,7 @@ final class VectorBuilder[A]() extends Builder[A,Vector[A]] with VectorPointer[A
private var lo = 0
def += (elem: A): this.type = {
- if (lo == 32) {
+ if (lo >= display0.length) {
val newBlockIndex = blockIndex+32
gotoNextBlockStartWritable(newBlockIndex, blockIndex ^ newBlockIndex)
blockIndex = newBlockIndex
@@ -624,11 +668,12 @@ final class VectorBuilder[A]() extends Builder[A,Vector[A]] with VectorPointer[A
}
def result: Vector[A] = {
- if (blockIndex + lo == 0)
+ val size = blockIndex + lo
+ if (size == 0)
return Vector.empty
- val s = new Vector[A](0, blockIndex + lo, 0) // TODO: should focus front or back?
+ val s = new Vector[A](0, size, 0) // should focus front or back?
s.initFrom(this)
- if (depth > 1) s.gotoPos(0, blockIndex + lo)
+ if (depth > 1) s.gotoPos(0, size - 1) // we're currently focused to size - 1, not size!
s
}
@@ -643,18 +688,18 @@ final class VectorBuilder[A]() extends Builder[A,Vector[A]] with VectorPointer[A
private[immutable] trait VectorPointer[T] {
- var depth: Int = _
- var display0: Array[AnyRef] = _
- var display1: Array[AnyRef] = _
- var display2: Array[AnyRef] = _
- var display3: Array[AnyRef] = _
- var display4: Array[AnyRef] = _
- var display5: Array[AnyRef] = _
+ private[immutable] var depth: Int = _
+ private[immutable] var display0: Array[AnyRef] = _
+ private[immutable] var display1: Array[AnyRef] = _
+ private[immutable] var display2: Array[AnyRef] = _
+ private[immutable] var display3: Array[AnyRef] = _
+ private[immutable] var display4: Array[AnyRef] = _
+ private[immutable] var display5: Array[AnyRef] = _
// used
- final def initFrom[U](that: VectorPointer[U]): Unit = initFrom(that, that.depth)
+ private[immutable] final def initFrom[U](that: VectorPointer[U]): Unit = initFrom(that, that.depth)
- final def initFrom[U](that: VectorPointer[U], depth: Int) = {
+ private[immutable] final def initFrom[U](that: VectorPointer[U], depth: Int) = {
this.depth = depth
(depth - 1) match {
case -1 =>
@@ -690,7 +735,7 @@ private[immutable] trait VectorPointer[T] {
// requires structure is at pos oldIndex = xor ^ index
- final def getElem(index: Int, xor: Int): T = {
+ private[immutable] final def getElem(index: Int, xor: Int): T = {
if (xor < (1 << 5)) { // level = 0
display0(index & 31).asInstanceOf[T]
} else
@@ -717,7 +762,7 @@ private[immutable] trait VectorPointer[T] {
// go to specific position
// requires structure is at pos oldIndex = xor ^ index,
// ensures structure is at pos index
- final def gotoPos(index: Int, xor: Int): Unit = {
+ private[immutable] final def gotoPos(index: Int, xor: Int): Unit = {
if (xor < (1 << 5)) { // level = 0 (could maybe removed)
} else
if (xor < (1 << 10)) { // level = 1
@@ -754,7 +799,7 @@ private[immutable] trait VectorPointer[T] {
// USED BY ITERATOR
// xor: oldIndex ^ index
- final def gotoNextBlockStart(index: Int, xor: Int): Unit = { // goto block start pos
+ private[immutable] final def gotoNextBlockStart(index: Int, xor: Int): Unit = { // goto block start pos
if (xor < (1 << 10)) { // level = 1
display0 = display1((index >> 5) & 31).asInstanceOf[Array[AnyRef]]
} else
@@ -787,7 +832,7 @@ private[immutable] trait VectorPointer[T] {
// USED BY BUILDER
// xor: oldIndex ^ index
- final def gotoNextBlockStartWritable(index: Int, xor: Int): Unit = { // goto block start pos
+ private[immutable] final def gotoNextBlockStartWritable(index: Int, xor: Int): Unit = { // goto block start pos
if (xor < (1 << 10)) { // level = 1
if (depth == 1) { display1 = new Array(32); display1(0) = display0; depth+=1}
display0 = new Array(32)
@@ -841,14 +886,15 @@ private[immutable] trait VectorPointer[T] {
// STUFF BELOW USED BY APPEND / UPDATE
- final def copyOf(a: Array[AnyRef]) = {
+ private[immutable] final def copyOf(a: Array[AnyRef]) = {
//println("copy")
+ if (a eq null) println ("NULL")
val b = new Array[AnyRef](a.length)
Platform.arraycopy(a, 0, b, 0, a.length)
b
}
- final def nullSlotAndCopy(array: Array[AnyRef], index: Int) = {
+ private[immutable] final def nullSlotAndCopy(array: Array[AnyRef], index: Int) = {
//println("copy and null")
val x = array(index)
array(index) = null
@@ -860,7 +906,7 @@ private[immutable] trait VectorPointer[T] {
// requires structure is at pos index
// ensures structure is clean and at pos index and writable at all levels except 0
- final def stabilize(index: Int) = (depth - 1) match {
+ private[immutable] final def stabilize(index: Int) = (depth - 1) match {
case 5 =>
display5 = copyOf(display5)
display4 = copyOf(display4)
@@ -901,16 +947,13 @@ private[immutable] trait VectorPointer[T] {
-
-
-
/// USED IN UPDATE AND APPEND BACK
// prepare for writing at an existing position
// requires structure is clean and at pos oldIndex = xor ^ newIndex,
// ensures structure is dirty and at pos newIndex and writable at level 0
- final def gotoPosWritable0(newIndex: Int, xor: Int): Unit = (depth - 1) match {
+ private[immutable] final def gotoPosWritable0(newIndex: Int, xor: Int): Unit = (depth - 1) match {
case 5 =>
display5 = copyOf(display5)
display4 = nullSlotAndCopy(display5, (newIndex >> 25) & 31).asInstanceOf[Array[AnyRef]]
@@ -943,7 +986,7 @@ private[immutable] trait VectorPointer[T] {
// requires structure is dirty and at pos oldIndex,
// ensures structure is dirty and at pos newIndex and writable at level 0
- final def gotoPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = {
+ private[immutable] final def gotoPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = {
if (xor < (1 << 5)) { // level = 0
display0 = copyOf(display0)
} else
@@ -1009,7 +1052,7 @@ private[immutable] trait VectorPointer[T] {
// USED IN DROP
- final def copyRange(array: Array[AnyRef], oldLeft: Int, newLeft: Int) = {
+ private[immutable] final def copyRange(array: Array[AnyRef], oldLeft: Int, newLeft: Int) = {
val elems = new Array[AnyRef](32)
Platform.arraycopy(array, oldLeft, elems, newLeft, 32 - math.max(newLeft,oldLeft))
elems
@@ -1023,7 +1066,7 @@ private[immutable] trait VectorPointer[T] {
// requires structure is clean and at pos oldIndex,
// ensures structure is dirty and at pos newIndex and writable at level 0
- final def gotoFreshPosWritable0(oldIndex: Int, newIndex: Int, xor: Int): Unit = { // goto block start pos
+ private[immutable] final def gotoFreshPosWritable0(oldIndex: Int, newIndex: Int, xor: Int): Unit = { // goto block start pos
if (xor < (1 << 5)) { // level = 0
//println("XXX clean with low xor")
} else
@@ -1103,7 +1146,7 @@ private[immutable] trait VectorPointer[T] {
// requires structure is dirty and at pos oldIndex,
// ensures structure is dirty and at pos newIndex and writable at level 0
- final def gotoFreshPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = {
+ private[immutable] final def gotoFreshPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = {
stabilize(oldIndex)
gotoFreshPosWritable0(oldIndex, newIndex, xor)
}
@@ -1113,7 +1156,7 @@ private[immutable] trait VectorPointer[T] {
// DEBUG STUFF
- def debug(): Unit = {
+ private[immutable] def debug(): Unit = {
return
/*
//println("DISPLAY 5: " + display5 + " ---> " + (if (display5 ne null) display5.map(x=> if (x eq null) "." else x + "->" +x.asInstanceOf[Array[AnyRef]].mkString("")).mkString(" ") else "null"))
diff --git a/src/library/scala/collection/interfaces/MapMethods.scala b/src/library/scala/collection/interfaces/MapMethods.scala
index dbe05906b1..fd6e7ad2a7 100644
--- a/src/library/scala/collection/interfaces/MapMethods.scala
+++ b/src/library/scala/collection/interfaces/MapMethods.scala
@@ -30,15 +30,15 @@ with SubtractableMethods[A, This]
def apply(key: A): B
def contains(key: A): Boolean
def isDefinedAt(key: A): Boolean
- def keySet: Set[A]
+ def keys: Iterable[A]
def keysIterator: Iterator[A]
- def valuesIterable: Iterable[B]
+ def keySet: Set[A]
+ def values: Iterable[B]
def valuesIterator: Iterator[B]
def default(key: A): B
def filterKeys(p: A => Boolean): DefaultMap[A, B]
def mapValues[C](f: B => C): DefaultMap[A, C]
def updated [B1 >: B](key: A, value: B1): Map[A, B1]
def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): Map[A, B1]
- def ++[B1 >: B](elems: Traversable[(A, B1)]): Map[A, B1]
- def ++[B1 >: B] (iter: Iterator[(A, B1)]): Map[A, B1]
+ def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1]
}
diff --git a/src/library/scala/collection/interfaces/SeqMethods.scala b/src/library/scala/collection/interfaces/SeqMethods.scala
index df0307174d..401c5e6c55 100644
--- a/src/library/scala/collection/interfaces/SeqMethods.scala
+++ b/src/library/scala/collection/interfaces/SeqMethods.scala
@@ -44,7 +44,7 @@ trait SeqMethods[+A, +This <: SeqLike[A, This] with Seq[A]] extends IterableMeth
def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[This, B, That]): That
def patch[B >: A, That](from: Int, patch: Seq[B], replaced: Int)(implicit bf: CanBuildFrom[This, B, That]): That
def prefixLength(p: A => Boolean): Int
- def removeDuplicates: This
+ def distinct: This
def reverse: This
def reverseIterator: Iterator[A]
def segmentLength(p: A => Boolean, from: Int): Int
diff --git a/src/library/scala/collection/interfaces/SetMethods.scala b/src/library/scala/collection/interfaces/SetMethods.scala
index 8a3142b44a..453143b790 100644
--- a/src/library/scala/collection/interfaces/SetMethods.scala
+++ b/src/library/scala/collection/interfaces/SetMethods.scala
@@ -21,8 +21,7 @@ trait AddableMethods[A, +This <: Addable[A, This]] {
protected def repr: This
def +(elem: A): This
def + (elem1: A, elem2: A, elems: A*): This
- def ++ (elems: Traversable[A]): This
- def ++ (iter: Iterator[A]): This
+ def ++ (xs: TraversableOnce[A]): This
}
/**
@@ -32,8 +31,7 @@ trait SubtractableMethods[A, +This <: Subtractable[A, This]] {
protected def repr: This
def -(elem: A): This
def -(elem1: A, elem2: A, elems: A*): This
- def --(elems: Traversable[A]): This
- def --(iter: Iterator[A]): This
+ def --(xs: TraversableOnce[A]): This
}
/**
diff --git a/src/library/scala/collection/interfaces/TraversableMethods.scala b/src/library/scala/collection/interfaces/TraversableMethods.scala
index 08ade7586d..1fc2451ec0 100644
--- a/src/library/scala/collection/interfaces/TraversableMethods.scala
+++ b/src/library/scala/collection/interfaces/TraversableMethods.scala
@@ -24,11 +24,10 @@ trait TraversableMethods[+A, +This <: TraversableLike[A, This] with Traversable[
// maps/iteration
def flatMap[B, That](f: A => Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That
- def partialMap[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[This, B, That]): That
+ def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[This, B, That]): That
// new collections
- def ++[B >: A, That](that: Iterator[B])(implicit bf: CanBuildFrom[This, B, That]): That
- def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[This, B, That]): That
+ def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[This, B, That]): That
def copyToArray[B >: A](xs: Array[B], start: Int): Unit
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit
def copyToBuffer[B >: A](dest: Buffer[B]): Unit
diff --git a/src/library/scala/collection/interfaces/TraversableOnceMethods.scala b/src/library/scala/collection/interfaces/TraversableOnceMethods.scala
new file mode 100644
index 0000000000..1e71215efd
--- /dev/null
+++ b/src/library/scala/collection/interfaces/TraversableOnceMethods.scala
@@ -0,0 +1,69 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package interfaces
+
+import mutable.Buffer
+
+trait TraversableOnceMethods[+A] {
+ self: TraversableOnce[A] =>
+
+ def foreach[U](f: A => U): Unit
+ protected[this] def reversed: TraversableOnce[A]
+
+ // tests
+ def isEmpty: Boolean
+ def nonEmpty: Boolean
+ def hasDefiniteSize: Boolean
+ def isTraversableAgain: Boolean
+
+ // applying a predicate
+ def forall(p: A => Boolean): Boolean
+ def exists(p: A => Boolean): Boolean
+ def find(p: A => Boolean): Option[A]
+ def count(p: A => Boolean): Int
+
+ // folds
+ def /:[B](z: B)(op: (B, A) => B): B
+ def :\[B](z: B)(op: (A, B) => B): B
+ def foldLeft[B](z: B)(op: (B, A) => B): B
+ def foldRight[B](z: B)(op: (A, B) => B): B
+ def reduceLeft[B >: A](op: (B, A) => B): B
+ def reduceRight[B >: A](op: (A, B) => B): B
+ def reduceLeftOption[B >: A](op: (B, A) => B): Option[B]
+ def reduceRightOption[B >: A](op: (A, B) => B): Option[B]
+
+ def sum[B >: A](implicit num: Numeric[B]): B
+ def product[B >: A](implicit num: Numeric[B]): B
+ def min[B >: A](implicit cmp: Ordering[B]): A
+ def max[B >: A](implicit cmp: Ordering[B]): A
+
+ // copies and conversions
+ def copyToBuffer[B >: A](dest: Buffer[B]): Unit
+ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit
+ def copyToArray[B >: A](xs: Array[B], start: Int): Unit
+ def copyToArray[B >: A](xs: Array[B]): Unit
+
+ def toArray[B >: A : ClassManifest]: Array[B]
+ def toIterable: Iterable[A]
+ def toIterator: Iterator[A]
+ def toList: List[A]
+ def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U]
+ def toSet[B >: A]: immutable.Set[B]
+ def toStream: Stream[A]
+ def toTraversable: Traversable[A]
+
+ def mkString(start: String, sep: String, end: String): String
+ def mkString(sep: String): String
+ def mkString: String
+
+ def addString(buf: StringBuilder, start: String, sep: String, end: String): StringBuilder
+ def addString(buf: StringBuilder, sep: String): StringBuilder
+ def addString(buf: StringBuilder): StringBuilder
+}
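
`TraversableOnceMethods` documents the interface shared by strict collections and iterators; throughout this commit, pairs of overloads taking `Traversable`/`Iterator` collapse into a single method taking `TraversableOnce`. A minimal sketch of writing such a method, using a hypothetical helper name:

    // one signature now serves both strict collections and iterators
    def sumAll(xs: TraversableOnce[Int]): Int =
      xs.foldLeft(0)(_ + _)

    sumAll(List(1, 2, 3))       // 6
    sumAll(Iterator(1, 2, 3))   // 6, no separate Iterator overload needed
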
diff --git a/src/library/scala/collection/mutable/AddingBuilder.scala b/src/library/scala/collection/mutable/AddingBuilder.scala
index 06822e859b..d16a4a71f3 100644
--- a/src/library/scala/collection/mutable/AddingBuilder.scala
+++ b/src/library/scala/collection/mutable/AddingBuilder.scala
@@ -24,7 +24,7 @@ import generic._
* @version 2.8
* @since 2.8
*/
-class AddingBuilder[Elem, To <: Addable[Elem, To] with scala.collection.Iterable[Elem] with scala.collection.IterableLike[Elem, To]](empty: To)
+class AddingBuilder[Elem, To <: Addable[Elem, To] with collection.Iterable[Elem] with collection.IterableLike[Elem, To]](empty: To)
extends Builder[Elem, To] {
protected var elems: To = empty
def +=(x: Elem): this.type = { elems = elems + x; this }
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index c88b9d3374..0c6aa9ce0c 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -29,7 +29,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
extends Buffer[A]
with GenericTraversableTemplate[A, ArrayBuffer]
with BufferLike[A, ArrayBuffer[A]]
- with IndexedSeqLike[A, ArrayBuffer[A]]
+ with IndexedSeqOptimized[A, ArrayBuffer[A]]
with Builder[A, ArrayBuffer[A]]
with ResizableArray[A] {
@@ -43,7 +43,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
override def sizeHint(len: Int) {
if (len > size && len >= 1) {
- val newarray = new Array[AnyRef](len min 1)
+ val newarray = new Array[AnyRef](len)
Array.copy(array, 0, newarray, 0, size0)
array = newarray
}
@@ -65,10 +65,10 @@ class ArrayBuffer[A](override protected val initialSize: Int)
* via its <code>iterator</code> method. The identity of the
* buffer is returned.
*
- * @param iter the iterfable object.
+ * @param iter the iterable object.
* @return the updated buffer.
*/
- override def ++=(iter: Traversable[A]): this.type = iter match {
+ override def ++=(xs: TraversableOnce[A]): this.type = xs match {
case v: IndexedSeq[_] =>
val n = v.length
ensureSize(size0 + n)
@@ -76,7 +76,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
size0 += n
this
case _ =>
- super.++=(iter)
+ super.++=(xs)
}
/** Prepends a single element to this buffer and return
@@ -101,7 +101,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
* @param iter the iterable object.
* @return the updated buffer.
*/
- override def ++=:(iter: Traversable[A]): this.type = { insertAll(0, iter); this }
+ override def ++=:(xs: TraversableOnce[A]): this.type = { insertAll(0, xs.toTraversable); this }
/** Inserts new elements at the index <code>n</code>. Opposed to method
* <code>update</code>, this method will not replace an element with a
@@ -125,7 +125,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
* the buffer size.
*
* @param n the index which refers to the first element to delete.
- * @param count the number of elemenets to delete
+ * @param count the number of elements to delete
* @throws Predef.IndexOutOfBoundsException if <code>n</code> is out of bounds.
*/
override def remove(n: Int, count: Int) {
diff --git a/src/library/scala/collection/mutable/ArrayBuilder.scala b/src/library/scala/collection/mutable/ArrayBuilder.scala
index c824cc15f5..7e4bab353b 100644
--- a/src/library/scala/collection/mutable/ArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/ArrayBuilder.scala
@@ -67,7 +67,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[T]): this.type = (xs: AnyRef) match {
+ override def ++=(xs: TraversableOnce[T]): this.type = (xs: AnyRef) match {
case xs: WrappedArray.ofRef[_] =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -131,7 +131,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[Byte]): this.type = xs match {
+ override def ++=(xs: TraversableOnce[Byte]): this.type = xs match {
case xs: WrappedArray.ofByte =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -195,7 +195,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[Short]): this.type = xs match {
+ override def ++=(xs: TraversableOnce[Short]): this.type = xs match {
case xs: WrappedArray.ofShort =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -259,7 +259,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[Char]): this.type = xs match {
+ override def ++=(xs: TraversableOnce[Char]): this.type = xs match {
case xs: WrappedArray.ofChar =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -323,7 +323,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[Int]): this.type = xs match {
+ override def ++=(xs: TraversableOnce[Int]): this.type = xs match {
case xs: WrappedArray.ofInt =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -387,7 +387,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[Long]): this.type = xs match {
+ override def ++=(xs: TraversableOnce[Long]): this.type = xs match {
case xs: WrappedArray.ofLong =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -451,7 +451,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[Float]): this.type = xs match {
+ override def ++=(xs: TraversableOnce[Float]): this.type = xs match {
case xs: WrappedArray.ofFloat =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -515,7 +515,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[Double]): this.type = xs match {
+ override def ++=(xs: TraversableOnce[Double]): this.type = xs match {
case xs: WrappedArray.ofDouble =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -579,7 +579,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[Boolean]): this.type = xs match {
+ override def ++=(xs: TraversableOnce[Boolean]): this.type = xs match {
case xs: WrappedArray.ofBoolean =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
@@ -643,7 +643,7 @@ object ArrayBuilder {
this
}
- override def ++=(xs: scala.collection.Traversable[Unit]): this.type = xs match {
+ override def ++=(xs: TraversableOnce[Unit]): this.type = xs match {
case xs: WrappedArray.ofUnit =>
ensureSize(this.size + xs.length)
Array.copy(xs.array, 0, elems, this.size, xs.length)
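
Each primitive `ArrayBuilder` now overrides `++=` for `TraversableOnce`, with a fast path that pattern-matches on the corresponding `WrappedArray` and bulk-copies via `Array.copy` instead of appending element by element. A sketch of the same idea outside the builder, using a hypothetical helper and assuming the 2.8 `mutable.WrappedArray` classes:

    import scala.collection.mutable.WrappedArray

    // append ints into `dest` starting at `offset`, bulk-copying when the
    // source is backed by an Int array; assumes `dest` has enough room
    def copyInts(src: TraversableOnce[Int], dest: Array[Int], offset: Int): Int = src match {
      case xs: WrappedArray.ofInt =>
        Array.copy(xs.array, 0, dest, offset, xs.length)
        xs.length
      case xs =>
        var i = offset
        xs foreach { x => dest(i) = x; i += 1 }
        i - offset
    }
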
diff --git a/src/library/scala/collection/mutable/ArrayLike.scala b/src/library/scala/collection/mutable/ArrayLike.scala
index c26f333afb..0b64f1255e 100644
--- a/src/library/scala/collection/mutable/ArrayLike.scala
+++ b/src/library/scala/collection/mutable/ArrayLike.scala
@@ -18,7 +18,7 @@ import generic._
*
* @since 2.8
*/
-trait ArrayLike[A, +Repr] extends IndexedSeqLike[A, Repr] { self =>
+trait ArrayLike[A, +Repr] extends IndexedSeqOptimized[A, Repr] { self =>
/** Creates a possible nested IndexedSeq which consists of all the elements
* of this array. If the elements are arrays themselves, the `deep' transformation
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 61fcc77e14..553461c805 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -24,6 +24,12 @@ abstract class ArrayOps[T] extends ArrayLike[T, Array[T]] {
ClassManifest.fromClass(
repr.getClass.getComponentType.getComponentType.asInstanceOf[Predef.Class[U]]))
+ override def toArray[U >: T : ClassManifest]: Array[U] =
+ if (implicitly[ClassManifest[U]].erasure eq repr.getClass.getComponentType)
+ repr.asInstanceOf[Array[U]]
+ else
+ super.toArray[U]
+
/** Flattens a two-dimensional array by concatenating all its rows
* into a single array
*/
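
The new `toArray` override above returns the receiver unchanged when the requested element type's erasure matches the array's component type, avoiding a copy. The check generalizes roughly as follows; this is a sketch under 2.8's `ClassManifest`, not the library code:

    // reuse the existing array if U erases to the same component type, else copy
    def toArrayOf[T, U >: T](arr: Array[T])(implicit m: ClassManifest[U]): Array[U] =
      if (m.erasure eq arr.getClass.getComponentType)
        arr.asInstanceOf[Array[U]]
      else {
        val out = new Array[U](arr.length)
        Array.copy(arr, 0, out, 0, arr.length)
        out
      }
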
diff --git a/src/library/scala/collection/mutable/GenericArray.scala b/src/library/scala/collection/mutable/ArraySeq.scala
index 4aecf48585..f6f958601d 100644
--- a/src/library/scala/collection/mutable/GenericArray.scala
+++ b/src/library/scala/collection/mutable/ArraySeq.scala
@@ -22,12 +22,12 @@ import generic._
* @version 2.8
* @since 2.8
*/
-class GenericArray[A](override val length: Int)
+class ArraySeq[A](override val length: Int)
extends IndexedSeq[A]
- with GenericTraversableTemplate[A, GenericArray]
- with IndexedSeqLike[A, GenericArray[A]] {
+ with GenericTraversableTemplate[A, ArraySeq]
+ with IndexedSeqOptimized[A, ArraySeq[A]] {
- override def companion: GenericCompanion[GenericArray] = GenericArray
+ override def companion: GenericCompanion[ArraySeq] = ArraySeq
val array: Array[AnyRef] = new Array[AnyRef](length)
@@ -64,11 +64,11 @@ extends IndexedSeq[A]
}
}
-object GenericArray extends SeqFactory[GenericArray] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenericArray[A]] = new GenericCanBuildFrom[A]
- def newBuilder[A]: Builder[A, GenericArray[A]] =
+object ArraySeq extends SeqFactory[ArraySeq] {
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ArraySeq[A]] = new GenericCanBuildFrom[A]
+ def newBuilder[A]: Builder[A, ArraySeq[A]] =
new ArrayBuffer[A] mapResult { buf =>
- val result = new GenericArray[A](buf.length)
+ val result = new ArraySeq[A](buf.length)
buf.copyToArray(result.array.asInstanceOf[Array[Any]], 0)
result
}
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index 1c3bdacaa5..8f9d1bfc88 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -104,15 +104,7 @@ class ArrayStack[T] private(private var table : Array[AnyRef],
*
* @param x The source of elements to push
*/
- def ++=(x: scala.collection.Iterable[T]): this.type = { x.foreach(this +=(_)); this }
-
-
- /**
- * Pushes all the provided elements onto the stack.
- *
- * @param x The source of elements to push
- */
- def ++=(x: Iterator[T]): this.type = { x.foreach(this +=(_)); this }
+ def ++=(xs: TraversableOnce[T]): this.type = { xs foreach += ; this }
/**
* Alias for push.
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index 3516a60233..0fb34cc8e0 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -14,6 +14,7 @@ package mutable
import generic._
import script._
+import annotation.migration
/** A template trait for buffers of type `Buffer[A]`.
*
@@ -30,10 +31,6 @@ import script._
* @author Martin Odersky
* @author Matthias Zenger
* @version 2.8
- * @since 2.8
- * @author Matthias Zenger
- * @author Martin Odersky
- * @version 2.8
* @since 2.8
* @define buffernote @note
* This trait provides most of the operations of a `Buffer` independently of its representation.
@@ -63,12 +60,14 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
extends Growable[A]
with Shrinkable[A]
with Scriptable[A]
- with Addable[A, This]
with Subtractable[A, This]
with Cloneable[This]
with SeqLike[A, This]
{ self : This =>
+ // Note this does not extend Addable because `+` is being phased out of
+ // all Seq-derived classes.
+
import scala.collection.{Iterable, Traversable}
// Abstract methods from IndexedSeq:
@@ -132,50 +131,33 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
this
}
- /** Prepends the elements contained in a traversable collection
- * to this buffer.
- * @param elems the collection containing the elements to prepend.
- * @return the buffer itself.
- */
- def ++=:(elems: Traversable[A]): this.type = { insertAll(0, elems); this }
-
- /** Prepends the elements produced by an iterator to this buffer.
+ /** Prepends elements to this buffer.
*
- * @param iter the iterator producing the elements to prepend.
- * @return the buffer itself.
+ * @param xs the TraversableOnce containing the elements to prepend.
+ * @return the buffer itself.
*/
- def ++=:(iter: Iterator[A]): this.type = { insertAll(0, iter.toSeq); this }
+ def ++=:(xs: TraversableOnce[A]): this.type = { insertAll(0, xs.toTraversable); this }
/** Appends the given elements to this buffer.
*
* @param elems the elements to append.
*/
- def append(elems: A*) { this ++= elems }
+ def append(elems: A*) { appendAll(elems) }
/** Appends the elements contained in a traversable collection to this buffer.
* @param elems the collection containing the elements to append.
*/
- def appendAll(elems: Traversable[A]) { this ++= elems }
-
- /** Appends the elements produced by an iterator to this buffer.
- * @param elems the iterator producing the elements to append.
- */
- def appendAll(iter: Iterator[A]) { this ++= iter }
+ def appendAll(xs: TraversableOnce[A]) { this ++= xs }
/** Prepends given elements to this buffer.
* @param elems the elements to prepend.
*/
- def prepend(elems: A*) { elems ++=: this }
+ def prepend(elems: A*) { prependAll(elems) }
/** Prepends the elements contained in a traversable collection to this buffer.
* @param elems the collection containing the elements to prepend.
*/
- def prependAll(iter: Traversable[A]) { iter ++=: this }
-
- /** Prepends a number of elements produced by an iterator to this buffer.
- * @param iter the iterator producing the elements to prepend.
- */
- def prependAll(iter: Iterator[A]) { iter ++=: this }
+ def prependAll(xs: TraversableOnce[A]) { xs ++=: this }
/** Inserts new elements at a given index into this buffer.
*
@@ -230,7 +212,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
*/
override def stringPrefix: String = "Buffer"
- /** Provide a read-only view of this byffer as a sequence
+ /** Provide a read-only view of this buffer as a sequence
* @return A sequence which refers to this buffer for all its operations.
*/
def readOnly: scala.collection.Seq[A] = toSeq
@@ -254,25 +236,35 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
@deprecated("use ++=: instead")
final def ++:(iter: Traversable[A]): This = ++=:(iter)
+ @deprecated("use `+=:' instead")
+ final def +:(elem: A): This = +=:(elem)
+
/** Adds a single element to this collection and returns
- * the collection itself.
+ * the collection itself. Note that for backward compatibility
+ * reasons, this method mutates the collection in place, unlike
+ * similar but undeprecated methods throughout the collections
+ * hierarchy. You are strongly recommended to use '+=' instead.
*
* @param elem the element to add.
*/
@deprecated("Use += instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=' if you intend to create a new collection.")
- override def + (elem: A): This = { +=(elem); repr }
+ "Use `clone() +=' if you intend to create a new collection.")
+ def + (elem: A): This = { +=(elem); repr }
/** Adds two or more elements to this collection and returns
- * the collection itself.
+ * the collection itself. Note that for backward compatibility
+ * reasons, this method mutates the collection in place, unlike
+ * similar but undeprecated methods throughout the collections
+ * hierarchy. You are strongly recommended to use '++=' instead.
*
* @param elem1 the first element to add.
* @param elem2 the second element to add.
* @param elems the remaining elements to add.
*/
- @deprecated("Use += instead if you intend to add by side effect to an existing collection.\n"+
+ @deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
"Use `clone() ++=' if you intend to create a new collection.")
- override def + (elem1: A, elem2: A, elems: A*): This = {
+ def + (elem1: A, elem2: A, elems: A*): This = {
this += elem1 += elem2 ++= elems
repr
}
@@ -282,33 +274,22 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
*
* @param iter the iterable object.
*/
- @deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=` if you intend to create a new collection.")
- override def ++(iter: Traversable[A]): This = {
- for (elem <- iter) +=(elem)
- repr
- }
-
- /** Adds a number of elements provided by an iterator and returns
- * the collection itself.
- *
- * @param iter the iterator
- */
- @deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=` if you intend to create a new collection.")
- override def ++ (iter: Iterator[A]): This = {
- for (elem <- iter) +=(elem)
- repr
- }
+ @migration(2, 8,
+ "As of 2.8, ++ always creates a new collection, even on Buffers.\n"+
+ "Use ++= instead if you intend to add by side effect to an existing collection.\n"
+ )
+ def ++(xs: TraversableOnce[A]): This = clone() ++= xs
/** Removes a single element from this collection and returns
* the collection itself.
*
* @param elem the element to remove.
*/
- @deprecated("Use -= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() -=` if you intend to create a new collection.")
- override def -(elem: A): This = { -=(elem); repr }
+ @migration(2, 8,
+ "As of 2.8, - always creates a new collection, even on Buffers.\n"+
+ "Use -= instead if you intend to remove by side effect from an existing collection.\n"
+ )
+ override def -(elem: A): This = clone() -= elem
/** Removes two or more elements from this collection and returns
* the collection itself.
@@ -317,40 +298,20 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
* @param elem2 the second element to remove.
* @param elems the remaining elements to remove.
*/
- @deprecated("Use -= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() -=` if you intend to create a new collection.")
- override def -(elem1: A, elem2: A, elems: A*): This = {
- this -= elem1 -= elem2 --= elems
- repr
- }
+ @migration(2, 8,
+ "As of 2.8, - always creates a new collection, even on Buffers.\n"+
+ "Use -= instead if you intend to remove by side effect from an existing collection.\n"
+ )
+ override def -(elem1: A, elem2: A, elems: A*): This = clone() -= elem1 -= elem2 --= elems
/** Removes a number of elements provided by a Traversable object and returns
* the collection itself.
*
* @param iter the Traversable object.
*/
- @deprecated("Use --= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() --=` if you intend to create a new collection.")
- override def --(iter: Traversable[A]): This = {
- for (elem <- iter) -=(elem)
- repr
- }
-
- @deprecated("use `+=:' instead")
- final def +:(elem: A): This = +=:(elem)
-
- /** Removes a number of elements provided by an iterator and returns
- * the collection itself.
- *
- * @param iter the iterator
- */
- @deprecated("Use --= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() --=` if you intend to create a new collection.")
- override def --(iter: Iterator[A]): This = {
- for (elem <- iter) -=(elem)
- repr
- }
+ @migration(2, 8,
+ "As of 2.8, -- always creates a new collection, even on Buffers.\n"+
+ "Use --= instead if you intend to remove by side effect from an existing collection.\n"
+ )
+ override def --(xs: TraversableOnce[A]): This = clone() --= xs
}
-
-
-
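
The upshot of the `BufferLike` changes above: the symbolic `+`, `-`, `++`, `--` operations on buffers now either carry a deprecation or a 2.8 migration warning, and the non-mutating ones are defined via `clone()`; in-place updates should use `+=`, `++=`, `-=`, `--=`. A short usage sketch under those semantics:

    import scala.collection.mutable.ArrayBuffer

    val buf = ArrayBuffer(1, 2, 3)
    buf += 4                    // mutates buf in place: ArrayBuffer(1, 2, 3, 4)
    buf ++= List(5, 6)          // in-place append of a TraversableOnce

    val grown = buf ++ List(7)  // 2.8 migration: builds a new collection, buf is unchanged
    val pruned = buf - 1        // likewise clone() -= 1, buf is unchanged
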
diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala
index cc8aba79ab..d4444dab67 100644
--- a/src/library/scala/collection/mutable/BufferProxy.scala
+++ b/src/library/scala/collection/mutable/BufferProxy.scala
@@ -61,14 +61,14 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
*/
@deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
"Use `clone() ++=` if you intend to create a new collection.")
- def ++(iter: scala.collection.Iterable[A]): Buffer[A] = self.++(iter)
+ override def ++(xs: TraversableOnce[A]): Buffer[A] = self.++(xs)
/** Appends a number of elements provided by an iterable object
* via its <code>iterator</code> method.
*
* @param iter the iterable object.
*/
- def ++=(iter: scala.collection.Iterable[A]): this.type = { self.++=(iter); this }
+ override def ++=(xs: TraversableOnce[A]): this.type = { self.++=(xs); this }
/** Appends a sequence of elements to this buffer.
*
@@ -81,7 +81,7 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
*
* @param iter the iterable object.
*/
- def appendAll(iter: scala.collection.Iterable[A]) { self.appendAll(iter) }
+ override def appendAll(xs: TraversableOnce[A]) { self.appendAll(xs) }
/** Prepend a single element to this buffer and return
* the identity of the buffer.
@@ -90,8 +90,7 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
*/
def +=:(elem: A): this.type = { self.+=:(elem); this }
- override def ++=:(iter: scala.collection.Traversable[A]): this.type = { self.++=:(iter); this }
- override def ++=:(iter: scala.collection.Iterator[A]): this.type = { self.++=:(iter); this }
+ override def ++=:(xs: TraversableOnce[A]): this.type = { self.++=:(xs); this }
/** Prepend an element to this list.
*
@@ -105,7 +104,7 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
*
* @param iter the iterable object.
*/
- def prependAll(elems: scala.collection.Iterable[A]) { self.prependAll(elems) }
+ override def prependAll(xs: TraversableOnce[A]) { self.prependAll(xs) }
/** Inserts new elements at the index <code>n</code>. Opposed to method
* <code>update</code>, this method will not replace an element with a
diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala
index c7932ae344..2e6a9149bc 100644
--- a/src/library/scala/collection/mutable/Builder.scala
+++ b/src/library/scala/collection/mutable/Builder.scala
@@ -48,7 +48,7 @@ trait Builder[-Elem, +To] extends Growable[Elem] {
* builder implementations are still required to work correctly even if the hint is
* wrong, i.e. a different number of elements is added.
*
- * @size the hint how many elements will be added.
+ * @param size the hint how many elements will be added.
*/
def sizeHint(size: Int) {}
@@ -64,8 +64,7 @@ trait Builder[-Elem, +To] extends Growable[Elem] {
val self = Builder.this
def +=(x: Elem): this.type = { self += x; this }
def clear() = self.clear()
- override def ++=(xs: Iterator[Elem]): this.type = { self ++= xs; this }
- override def ++=(xs:scala.collection.Traversable[Elem]): this.type = { self ++= xs; this }
+ override def ++=(xs: TraversableOnce[Elem]): this.type = { self ++= xs; this }
def result: NewTo = f(self.result)
}
}
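
The hunk above is the wrapper produced by `Builder.mapResult`: element additions are forwarded to the underlying builder and only `result` applies the transformation, as `ArraySeq.newBuilder` earlier in this patch does by collecting into an `ArrayBuffer` and converting at the end. A small sketch of the same technique with a hypothetical target type:

    import scala.collection.mutable.{ArrayBuffer, Builder}

    // build a sorted List[Int] by collecting into an ArrayBuffer first
    def sortedListBuilder: Builder[Int, List[Int]] =
      new ArrayBuffer[Int] mapResult (buf => buf.sortWith(_ < _).toList)

    val b = sortedListBuilder
    b += 3; b += 1; b += 2
    b.result   // List(1, 2, 3)
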
diff --git a/src/library/scala/collection/mutable/ConcurrentMap.scala b/src/library/scala/collection/mutable/ConcurrentMap.scala
index d09bf57e1b..2cfa4f8ae2 100644
--- a/src/library/scala/collection/mutable/ConcurrentMap.scala
+++ b/src/library/scala/collection/mutable/ConcurrentMap.scala
@@ -1,9 +1,5 @@
-package scala.collection.mutable
-
-
-
-
-
+package scala.collection
+package mutable
/**
* A template trait for mutable maps that allow concurrent access.
@@ -66,14 +62,4 @@ trait ConcurrentMap[A, B] extends Map[A, B] {
* @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise
*/
def replace(k: A, v: B): Option[B]
-
}
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala
index 8c50f739e1..718d6aa35d 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedList.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala
@@ -40,7 +40,7 @@ class DoubleLinkedList[A]() extends LinearSeq[A]
}
object DoubleLinkedList extends SeqFactory[DoubleLinkedList] {
- implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, DoubleLinkedList[A]] = new GenericCanBuildFrom[A] //new CanBuildFrom[Coll, A, DoubleLinkedList[A]] { : Coll) = from.traversableBuilder[A] }
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, DoubleLinkedList[A]] = new GenericCanBuildFrom[A]
def newBuilder[A]: Builder[A, DoubleLinkedList[A]] =
new Builder[A, DoubleLinkedList[A]] {
var current: DoubleLinkedList[A] = _
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index 0e73bf7fad..ea4033d405 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -11,7 +11,7 @@
package scala.collection
package mutable
-/**
+/** An implementation class backing a HashSet.
* @since 2.3
*/
trait FlatHashTable[A] {
@@ -46,7 +46,7 @@ trait FlatHashTable[A] {
private def initialCapacity = capacity(initialSize)
/**
- * Initialises the collection from the input stream. `f` will be called for each element
+ * Initializes the collection from the input stream. `f` will be called for each element
* read from the input stream in the order determined by the stream. This is useful for
* structures where iteration order is important (e.g. LinkedHashSet).
*
diff --git a/src/library/scala/collection/mutable/GrowingBuilder.scala b/src/library/scala/collection/mutable/GrowingBuilder.scala
new file mode 100644
index 0000000000..445e9d4f3e
--- /dev/null
+++ b/src/library/scala/collection/mutable/GrowingBuilder.scala
@@ -0,0 +1,30 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package mutable
+
+import generic._
+
+/** The canonical builder for collections that are growable, i.e. that support an
+ * efficient `+=` method which adds an element to the collection. It is
+ * almost identical to AddingBuilder, but necessitated by the existence of
+ * classes which are Growable but not Addable, which is a result of covariance
+ * interacting surprisingly with any2stringadd thus driving '+' out of the Seq
+ * hierarchy. The tendrils of original sin should never be underestimated.
+ *
+ * @author Paul Phillips
+ * @version 2.8
+ * @since 2.8
+ */
+class GrowingBuilder[Elem, To <: Growable[Elem]](empty: To) extends Builder[Elem, To] {
+ protected var elems: To = empty
+ def +=(x: Elem): this.type = { elems += x; this }
+ def clear() { elems = empty }
+ def result: To = elems
+}
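A minimal usage sketch (assuming Scala 2.8), showing how a GrowingBuilder wraps any Growable collection and returns it as the result; ListBuffer is chosen here purely as an example of a Growable:

    import scala.collection.mutable.{GrowingBuilder, ListBuffer}

    val gb = new GrowingBuilder[Int, ListBuffer[Int]](new ListBuffer[Int])
    gb += 1
    gb += 2
    val lb = gb.result   // the very ListBuffer instance that was grown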
diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala
index 658c574087..2b5cad37d8 100644
--- a/src/library/scala/collection/mutable/HashMap.scala
+++ b/src/library/scala/collection/mutable/HashMap.scala
@@ -67,7 +67,7 @@ class HashMap[A, B] extends Map[A, B]
}
/* Override to avoid tuple allocation in foreach */
- override def valuesIterable: collection.Iterable[B] = new DefaultValuesIterable {
+ override def values: collection.Iterable[B] = new DefaultValuesIterable {
override def foreach[C](f: B => C) = foreachEntry(e => f(e.value))
}
diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala
index f1f2ed3274..e985e717b0 100644
--- a/src/library/scala/collection/mutable/HashSet.scala
+++ b/src/library/scala/collection/mutable/HashSet.scala
@@ -50,7 +50,7 @@ class HashSet[A] extends Set[A]
}
}
- override def clone(): Set[A] = new HashSet[A] ++= this
+ override def clone() = new HashSet[A] ++= this
private def writeObject(s: java.io.ObjectOutputStream) {
serializeTo(s)
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index aa7993ed14..14f1720a4c 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -65,7 +65,7 @@ trait HashTable[A] {
private def initialCapacity = capacity(initialSize)
/**
- * Initialises the collection from the input stream. `f` will be called for each key/value pair
+ * Initializes the collection from the input stream. `f` will be called for each key/value pair
* read from the input stream in the order determined by the stream. This is useful for
* structures where iteration order is important (e.g. LinkedHashMap).
*/
diff --git a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
index d6b3115b81..fba28e7c2a 100644
--- a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
@@ -12,6 +12,7 @@
package scala.collection
package mutable
+import annotation.migration
/** This class can be used as an adaptor to create mutable maps from
* immutable map implementations. Only method <code>empty</code> has
@@ -41,19 +42,17 @@ extends Map[A, B]
override def isDefinedAt(key: A) = imap.isDefinedAt(key)
- override def keySet: scala.collection.Set[A] = imap.keySet
+ override def keySet: collection.Set[A] = imap.keySet
override def keysIterator: Iterator[A] = imap.keysIterator
- @deprecated("use `keysIterator' instead")
- override def keys: Iterator[A] = imap.keysIterator
-
- override def valuesIterable: scala.collection.Iterable[B] = imap.valuesIterable
+ @migration(2, 8, "As of 2.8, keys returns Iterable[A] rather than Iterator[A].")
+ override def keys: collection.Iterable[A] = imap.keys
override def valuesIterator: Iterator[B] = imap.valuesIterator
- @deprecated("use `valuesIterator' instead")
- override def values: Iterator[B] = imap.valuesIterator
+ @migration(2, 8, "As of 2.8, values returns Iterable[B] rather than Iterator[B].")
+ override def values: collection.Iterable[B] = imap.values
def iterator: Iterator[(A, B)] = imap.iterator
diff --git a/src/library/scala/collection/mutable/IndexedSeq.scala b/src/library/scala/collection/mutable/IndexedSeq.scala
index b11131e917..0a173395e0 100644
--- a/src/library/scala/collection/mutable/IndexedSeq.scala
+++ b/src/library/scala/collection/mutable/IndexedSeq.scala
@@ -16,6 +16,9 @@ import generic._
/** A subtrait of <code>collection.IndexedSeq</code> which represents sequences
* that can be mutated.
+ * $indexedSeqInfo
+ *
+ * @since 2.8
*/
trait IndexedSeq[A] extends Seq[A]
with scala.collection.IndexedSeq[A]
diff --git a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
new file mode 100755
index 0000000000..134cc2a8ea
--- /dev/null
+++ b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
@@ -0,0 +1,21 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+// $Id: IndexedSeqLike.scala 20129 2009-12-14 17:12:17Z odersky $
+
+
+package scala.collection
+package mutable
+import generic._
+
+/** A subtrait of scala.collection.IndexedSeq which represents sequences
+ * that can be mutated.
+ *
+ * @since 2.8
+ */
+trait IndexedSeqOptimized[A, +Repr] extends IndexedSeqLike[A, Repr] with scala.collection.IndexedSeqOptimized[A, Repr]
diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala
index e864845455..d870b762d3 100644
--- a/src/library/scala/collection/mutable/IndexedSeqView.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqView.scala
@@ -25,7 +25,10 @@ import TraversableView.NoBuilder
* @version 2.8
* @since 2.8
*/
-trait IndexedSeqView[A, +Coll] extends scala.collection.IndexedSeqView[A, Coll] {
+trait IndexedSeqView[A, +Coll] extends IndexedSeq[A]
+ with IndexedSeqOptimized[A, IndexedSeqView[A, Coll]]
+ with scala.collection.SeqView[A, Coll]
+ with scala.collection.SeqViewLike[A, Coll, IndexedSeqView[A, Coll]] {
self =>
def update(idx: Int, elem: A)
@@ -88,9 +91,22 @@ self =>
override def reverse: IndexedSeqView[A, Coll] = newReversed.asInstanceOf[IndexedSeqView[A, Coll]]
}
-/*
- * object IndexedSeqView {
- type Coll = TraversableView[_, C] forSome { type C <: scala.collection.Traversable[_] }
- implicit def canBuildFrom[A]: CanBuildFrom[IndexedSeq[_], A, IndexedSeqView[A], Coll] = new CanBuildFrom[mutable.IndexedSeq[_], A, IndexedSeqView[A], Coll] { : Coll) = new NoBuilder }
+/** $factoryInfo
+ * Note that the canBuildFrom factories yield SeqViews, not IndexedSeqViews.
+ * This is intentional, because not all operations yield again a mutable.IndexedSeqView.
+ * For instance, map just gives a SeqView, which reflects the fact that
+ * map cannot do its work and maintain a pointer into the original indexed sequence.
+ */
+object IndexedSeqView {
+ type Coll = TraversableView[_, C] forSome {type C <: Traversable[_]}
+ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, SeqView[A, Seq[_]]] =
+ new CanBuildFrom[Coll, A, SeqView[A, Seq[_]]] {
+ def apply(from: Coll) = new NoBuilder
+ def apply() = new NoBuilder
+ }
+ implicit def arrCanBuildFrom[A]: CanBuildFrom[TraversableView[_, Array[_]], A, SeqView[A, Array[A]]] =
+ new CanBuildFrom[TraversableView[_, Array[_]], A, SeqView[A, Array[A]]] {
+ def apply(from: TraversableView[_, Array[_]]) = new NoBuilder
+ def apply() = new NoBuilder
+ }
}
-*/
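A minimal sketch (assuming Scala 2.8; ArrayBuffer is just one IndexedSeq whose view is mutable) of the point made in the comment above: the view itself supports update, but map already drops back to a plain SeqView:

    import scala.collection.mutable.ArrayBuffer

    val buf = ArrayBuffer(1, 2, 3)
    val v = buf.view         // a mutable IndexedSeqView backed by buf
    v(0) = 10                // update writes through to the underlying buffer
    val w = v map (_ * 2)    // statically only a SeqView: no index into buf is retained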
diff --git a/src/library/scala/collection/mutable/LazyBuilder.scala b/src/library/scala/collection/mutable/LazyBuilder.scala
index 2ce1fa9827..7714d29f08 100644
--- a/src/library/scala/collection/mutable/LazyBuilder.scala
+++ b/src/library/scala/collection/mutable/LazyBuilder.scala
@@ -20,10 +20,9 @@ import immutable.{List, Nil}
*/
abstract class LazyBuilder[Elem, +To] extends Builder[Elem, To] {
/** The different segments of elements to be added to the builder, represented as iterators */
- protected var parts = new ListBuffer[scala.collection.Traversable[Elem]]
+ protected var parts = new ListBuffer[TraversableOnce[Elem]]
def +=(x: Elem): this.type = { parts += List(x); this }
- override def ++=(xs: Iterator[Elem]): this.type = { parts += xs.toStream; this }
- override def ++=(xs: scala.collection.Traversable[Elem]): this.type = { parts += xs; this }
+ override def ++=(xs: TraversableOnce[Elem]): this.type = { parts += xs ; this }
def result(): To
def clear() { parts.clear() }
}
diff --git a/src/library/scala/collection/mutable/LinearSeq.scala b/src/library/scala/collection/mutable/LinearSeq.scala
index 25d7ef6be8..9abaef5aff 100644
--- a/src/library/scala/collection/mutable/LinearSeq.scala
+++ b/src/library/scala/collection/mutable/LinearSeq.scala
@@ -14,8 +14,9 @@ package mutable
import generic._
-/** A subtrait of <code>collection.Seq</code> which represents sequences
- * that cannot be mutated.
+/** A subtrait of <code>collection.LinearSeq</code> which represents sequences
+ * that can be mutated.
+ * $linearSeqInfo
*
* @since 2.8
*/
diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala
index c363609762..2523ece370 100644
--- a/src/library/scala/collection/mutable/LinkedListLike.scala
+++ b/src/library/scala/collection/mutable/LinkedListLike.scala
@@ -55,7 +55,7 @@ trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends Seq
}
/** Insert linked list `that` at current position of this linked list
- * @pre this linked list is not empty
+ * @note this linked list must not be empty
*/
def insert(that: This): Unit = {
require(nonEmpty, "insert into empty list")
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index 686b1acf8d..b8e5aeb262 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -231,7 +231,7 @@ final class ListBuffer[A]
*
* @param n the index which refers to the element to delete.
* @return n the element that was formerly at position <code>n</code>.
- * @pre an element exists at position <code>n</code>
+ * @note an element must exist at position <code>n</code>
* @throws Predef.IndexOutOfBoundsException if <code>n</code> is out of bounds.
*/
def remove(n: Int): A = {
@@ -335,5 +335,5 @@ final class ListBuffer[A]
*/
object ListBuffer extends SeqFactory[ListBuffer] {
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListBuffer[A]] = new GenericCanBuildFrom[A]
- def newBuilder[A]: Builder[A, ListBuffer[A]] = new AddingBuilder(new ListBuffer[A])
+ def newBuilder[A]: Builder[A, ListBuffer[A]] = new GrowingBuilder(new ListBuffer[A])
}
diff --git a/src/library/scala/collection/mutable/ListMap.scala b/src/library/scala/collection/mutable/ListMap.scala
index 82cc6340c0..c96873c81d 100644
--- a/src/library/scala/collection/mutable/ListMap.scala
+++ b/src/library/scala/collection/mutable/ListMap.scala
@@ -14,7 +14,7 @@ package mutable
import generic._
-/**
+/** A simple map backed by a list.
* @since 2.8
*/
@serializable
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index a1bb25910a..9c3c1c0e5f 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -13,6 +13,7 @@ package scala.collection
package mutable
import generic._
+import annotation.migration
/** A template trait for mutable maps of type `mutable.Map[A, B]` which
* associate keys of type `A` with values of type `B`.
@@ -41,7 +42,7 @@ import generic._
* {{{
* def empty: This
* }}}
- * If you wish to avoid the unncessary construction of an `Option`
+ * If you wish to avoid the unnecessary construction of an `Option`
* object, you could also override `apply`, `update`,
* and `delete`.
@@ -51,7 +52,7 @@ import generic._
* @define Coll mutable.Map
*/
trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
- extends MapLikeBase[A, B, This]
+ extends scala.collection.MapLike[A, B, This]
with Builder[(A, B), This]
with Growable[(A, B)]
with Shrinkable[A]
@@ -101,23 +102,24 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
def += (kv: (A, B)): this.type
/** Creates a new map consisting of all key/value pairs of the current map
- * plus a new pair of a guven key and value.
+ * plus a new pair of a given key and value.
*
* @param key The key to add
* @param value The new value
* @return A fresh immutable map with the binding from `key` to
* `value` added to this map.
*/
- override def updated[B1 >: B](key: A, value: B1): mutable.Map[A, B1] = this + ((key, value))
+ override def updated[B1 >: B](key: A, value: B1): Map[A, B1] = this + ((key, value))
/** Add a new key/value mapping and return the map itself.
*
* @param kv the key/value mapping to be added
*/
- @deprecated("This operation will create a new map in the future. To add an element as a side\n"+
- "effect to an existing map and return that map itself, use +=. If you do want\n"+
- "to create a fresh map, you can use `clone() +=' to avoid a @deprecated warning.")
- def + (kv: (A, B)): this.type = { update(kv._1, kv._2); this }
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new map. To add an element as a\n"+
+ "side effect to an existing map and return that map itself, use +=."
+ )
+ def + [B1 >: B] (kv: (A, B1)): Map[A, B1] = clone().asInstanceOf[Map[A, B1]] += kv
/** Adds two or more key/value mappings and return the map itself.
* with the added elements.
@@ -126,11 +128,12 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @param elem2 the second element to add.
* @param elems the remaining elements to add.
*/
- @deprecated("This operation will create a new map in the future. To add an element as a side\n"+
- "effect to an existing map and return that map itself, use +=. If you do want to\n"+
- "create a fresh map, you can use `clone() +=` to avoid a @deprecated warning.")
- def +(elem1: (A, B), elem2: (A, B), elems: (A, B)*): this.type =
- this += elem1 += elem2 ++= elems
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new map. To add an element as a\n"+
+ "side effect to an existing map and return that map itself, use +=."
+ )
+ override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): Map[A, B1] =
+ clone().asInstanceOf[Map[A, B1]] += elem1 += elem2 ++= elems
/** Adds a number of elements provided by a traversable object
* via its `iterator` method and returns
@@ -139,21 +142,12 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
*
* @param iter the traversable object.
*/
- @deprecated("This operation will create a new map in the future. To add elements as a side\n"+
- "effect to an existing map and return that map itself, use ++=. If you do want\n"+
- "to create a fresh map, you can use `clone() ++=` to avoid a @deprecated warning.")
- def ++(iter: Traversable[(A, B)]): this.type = { for (elem <- iter) +=(elem); this }
-
- /** Adds a number of elements provided by an iterator
- * via its `iterator` method and returns
- * the collection itself.
- *
- * @param iter the iterator
- */
- @deprecated("This operation will create a new map in the future. To add elements as a side\n"+
- "effect to an existing map and return that map itself, use ++=. If you do want\n"+
- "to create a fresh map, you can use `clone() +=` to avoid a @deprecated warning.")
- def ++(iter: Iterator[(A, B)]): this.type = { for (elem <- iter) +=(elem); this }
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new map. To add the elements as a\n"+
+ "side effect to an existing map and return that map itself, use ++=."
+ )
+ override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): Map[A, B1] =
+ clone().asInstanceOf[Map[A, B1]] ++= xs
/** Removes a key from this map, returning the value associated previously
* with that key as an option.
@@ -176,30 +170,31 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
/** Delete a key from this map if it is present and return the map itself.
* @param key the key to be removed
*/
- @deprecated("This operation will create a new map in the future. To add elements as a side\n"+
- "effect to an existing map and return that map itself, use -=. If you do want\n"+
- "to create a fresh map, you can use `clone() -=` to avoid a @deprecated warning.")
- override def -(key: A): This = { -=(key); repr }
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new map. To remove an element as a\n"+
+ "side effect to an existing map and return that map itself, use -=."
+ )
+ override def -(key: A): This = clone() -= key
/** If given key is defined in this map, remove it and return associated value as an Option.
* If key is not present return None.
* @param key the key to be removed
*/
- @deprecated("Use `remove' instead") def removeKey(key: A): Option[B] = remove(key)
+ @deprecated("Use `remove' instead") def removeKey(key: A): Option[B] = remove(key)
/** Removes all bindings from the map. After this operation has completed,
* the map will be empty.
*/
- def clear() { for ((k, v) <- this.iterator) -=(k) }
+ def clear() { keysIterator foreach -= }
/** If given key is already in this map, returns associated value
* Otherwise, computes value from given expression `op`, stores with key
* in map and returns that value.
- * @param the key to test
- * @param the computation yielding the value to associate with `key`, if
- * `key` is previosuly unbound.
- * @return the value associated with key (either previously or as a result
- * of executing the method).
+ * @param key the key to test
+ * @param op the computation yielding the value to associate with `key`, if
+ * `key` is previously unbound.
+ * @return the value associated with key (either previously or as a result
+ * of executing the method).
*/
def getOrElseUpdate(key: A, op: => B): B =
get(key) match {
@@ -209,7 +204,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
/** Applies a transformation function to all values contained in this map.
* The transformation function produces new values from existing keys
- * asssociated values.
+ * associated values.
*
* @param f the transformation to apply
* @return the map itself.
@@ -232,8 +227,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
this
}
- override def clone(): This =
- empty ++= repr
+ override def clone(): This = empty ++= repr
/** The result when this map is used as a builder
* @return the map representation itself.
@@ -247,35 +241,21 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
* @param elem2 the second element to remove.
* @param elems the remaining elements to remove.
*/
- @deprecated("Use -= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() -=' if you intend to create a new collection.")
- override def -(elem1: A, elem2: A, elems: A*): This = {
- this -= elem1 -= elem2 --= elems
- repr
- }
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new map. To remove an element as a\n"+
+ "side effect to an existing map and return that map itself, use -=."
+ )
+ override def -(elem1: A, elem2: A, elems: A*): This =
+ clone() -= elem1 -= elem2 --= elems
/** Removes a number of elements provided by a Traversable object and returns
* the collection itself.
*
* @param iter the Traversable object.
*/
- @deprecated("Use --= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() --=' if you intend to create a new collection.")
- override def --(iter: Traversable[A]): This = {
- for (elem <- iter) -=(elem)
- repr
- }
-
-
- /** Removes a number of elements provided by an iterator and returns
- * the collection itself.
- *
- * @param iter the iterator
- */
- @deprecated("Use --= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() --=' if you intend to create a new collection.")
- override def --(iter: Iterator[A]): This = {
- for (elem <- iter) -=(elem)
- repr
- }
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new map. To remove the elements as a\n"+
+ "side effect to an existing map and return that map itself, use --=."
+ )
+ override def --(xs: TraversableOnce[A]): This = clone() --= xs
}
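For the migration above, a minimal before/after sketch (Scala 2.8, with HashMap behind mutable.Map as the concrete implementation):

    import scala.collection.mutable

    val m = mutable.Map(1 -> "a")
    val m2 = m + (2 -> "b")   // 2.8: builds and returns a new map; m is untouched
    m += (3 -> "c")           // mutates m in place and returns m itself
    m --= List(1, 3)          // bulk removal by side effect, the --= analogue of -- above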
diff --git a/src/library/scala/collection/mutable/MapLikeBase.scala b/src/library/scala/collection/mutable/MapLikeBase.scala
deleted file mode 100644
index 402df79d84..0000000000
--- a/src/library/scala/collection/mutable/MapLikeBase.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.collection
-package mutable
-
-/** The reason for this class is so that we can
- * have both a generic immutable `+` with signature
- *
- * def + [B1 >: B](kv: (A, B1)): Map[A, B1]
- *
- * and a (deprecated) mutable `+` of signature
- *
- * def + (kv: (A, B)): this.type = this += kv
- *
- * The former is required to fulfill the Map contract.
- * The latter is required for backwards compatibility.
- * We can't have both methods in the same class, as that would give a double definition.
- * They are OK in different classes though, and narrowly escape a `same erasure' problem.
- * Once the deprecated + goes away we can do without class MapLikeBase.
- *
- * @author Martin Odersky
- * @version 2.8
- * @since 2.8
- */
-trait MapLikeBase[A, B, +This <: MapLikeBase[A, B, This] with Map[A, B]]
- extends scala.collection.MapLike[A, B, This] with Cloneable[This] {
- def + [B1 >: B] (kv: (A, B1)): mutable.Map[A, B1] = clone().asInstanceOf[mutable.Map[A, B1]] += kv
-}
diff --git a/src/library/scala/collection/mutable/MapProxy.scala b/src/library/scala/collection/mutable/MapProxy.scala
index 55e6eba1e3..cb768f6778 100644
--- a/src/library/scala/collection/mutable/MapProxy.scala
+++ b/src/library/scala/collection/mutable/MapProxy.scala
@@ -28,14 +28,16 @@ package mutable
trait MapProxy[A, B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]]
{
+ private def newProxy[B1 >: B](newSelf: Map[A, B1]): MapProxy[A, B1] =
+ new MapProxy[A, B1] { val self = newSelf }
+
override def repr = this
override def empty: MapProxy[A, B] = new MapProxy[A, B] { val self = MapProxy.this.self.empty }
- override def +(kv: (A, B)) = { self.update(kv._1, kv._2) ; this }
- override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *) =
- { self.+(elem1, elem2, elems: _*) ; this }
+ override def + [B1 >: B] (kv: (A, B1)): Map[A, B1] = newProxy(self + kv)
+ override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *) = newProxy(self.+(elem1, elem2, elems: _*))
- override def -(key: A) = { self.remove(key); this }
+ override def -(key: A) = newProxy(self - key)
override def += (kv: (A, B)) = { self += kv ; this }
override def -= (key: A) = { self -= key ; this }
diff --git a/src/library/scala/collection/mutable/MultiMap.scala b/src/library/scala/collection/mutable/MultiMap.scala
index e335500349..01ddea070c 100644
--- a/src/library/scala/collection/mutable/MultiMap.scala
+++ b/src/library/scala/collection/mutable/MultiMap.scala
@@ -25,6 +25,9 @@ package mutable
trait MultiMap[A, B] extends Map[A, Set[B]] {
protected def makeSet: Set[B] = new HashSet[B]
+ @deprecated("use addBinding instead")
+ def add(key: A, value: B): this.type = addBinding(key, value)
+
def addBinding(key: A, value: B): this.type = {
get(key) match {
case None =>
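The new add alias simply forwards to addBinding; a minimal usage sketch of the MultiMap mixin (assuming Scala 2.8):

    import scala.collection.mutable.{HashMap, MultiMap, Set}

    val mm = new HashMap[String, Set[Int]] with MultiMap[String, Int]
    mm.addBinding("even", 2)
    mm.addBinding("even", 4)
    mm.removeBinding("even", 2)
    // mm("even") is now Set(4)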
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index e6423aa677..7784927c87 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -29,7 +29,7 @@ import immutable.{List, Nil}
*/
@serializable @SerialVersionUID(5938451523372603072L)
class MutableList[A] extends LinearSeq[A]
- with LinearSeqLike[A, MutableList[A]]
+ with LinearSeqOptimized[A, MutableList[A]]
with Builder[A, MutableList[A]] {
override protected[this] def newBuilder = new MutableList[A]
diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala
index 9f0d9d2c25..79bb96a0bf 100644
--- a/src/library/scala/collection/mutable/OpenHashMap.scala
+++ b/src/library/scala/collection/mutable/OpenHashMap.scala
@@ -15,7 +15,7 @@ package mutable
/**
* @since 2.7
*/
-object OpenHashMap{
+object OpenHashMap {
def apply[K, V](elems : (K, V)*) = {
val dict = new OpenHashMap[K, V];
elems.foreach({case (x, y) => dict(x) = y});
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index c4dac9effb..4d74a2ee74 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -13,7 +13,7 @@ package scala.collection
package mutable
import generic._
-
+import annotation.migration
/** This class implements priority queues using a heap.
* To prioritize elements of type T there must be an implicit
@@ -28,7 +28,6 @@ import generic._
class PriorityQueue[A](implicit ord: Ordering[A])
extends Seq[A]
with SeqLike[A, PriorityQueue[A]]
- with Addable[A, PriorityQueue[A]]
with Growable[A]
with Cloneable[PriorityQueue[A]]
with Builder[A, PriorityQueue[A]]
@@ -47,8 +46,8 @@ class PriorityQueue[A](implicit ord: Ordering[A])
private val resarr = new ResizableArrayAccess[A]
- resarr.p_size0 += 1 // we do not use array(0)
- override def length: Int = resarr.length - 1 // adjust length accordingly
+ resarr.p_size0 += 1 // we do not use array(0)
+ override def length: Int = resarr.length - 1 // adjust length accordingly
override def size: Int = length
override def isEmpty: Boolean = resarr.p_size0 < 2
override def repr = this
@@ -116,6 +115,23 @@ class PriorityQueue[A](implicit ord: Ordering[A])
}
}
+ @deprecated(
+ "Use += instead if you intend to add by side effect to an existing collection.\n"+
+ "Use `clone() +=' if you intend to create a new collection."
+ )
+ def +(elem: A): PriorityQueue[A] = { this.clone() += elem }
+
+ /** Add two or more elements to this priority queue.
+ * @param elem1 the first element.
+ * @param elem2 the second element.
+ * @param elems the remaining elements.
+ */
+ @deprecated(
+ "Use ++= instead if you intend to add by side effect to an existing collection.\n"+
+ "Use `clone() ++=' if you intend to create a new collection."
+ )
+ def +(elem1: A, elem2: A, elems: A*) = { this.clone().+=(elem1, elem2, elems : _*) }
+
/** Inserts a single element into the priority queue.
*
* @param elem the element to insert
@@ -128,27 +144,12 @@ class PriorityQueue[A](implicit ord: Ordering[A])
this
}
- def +(elem: A): PriorityQueue[A] = { this.clone() += elem }
-
- /** Add two or more elements to this set.
- * @param elem1 the first element.
- * @param kv2 the second element.
- * @param kvs the remaining elements.
- */
- override def +(elem1: A, elem2: A, elems: A*) = { this.clone().+=(elem1, elem2, elems : _*) }
-
/** Adds all elements provided by an <code>Iterable</code> object
* into the priority queue.
*
* @param iter an iterable object
*/
- override def ++(elems: scala.collection.Traversable[A]) = { this.clone() ++= elems }
-
- /** Adds all elements provided by an iterator into the priority queue.
- *
- * @param it an iterator
- */
- override def ++(iter: Iterator[A]) = { this.clone() ++= iter } // ...whereas this doesn't?
+ def ++(xs: TraversableOnce[A]) = { this.clone() ++= xs }
/** Adds all elements to the queue.
*
@@ -223,7 +224,7 @@ class PriorityQueue[A](implicit ord: Ordering[A])
}
override def reverseIterator = new Iterator[A] {
- val arr = new Array[Any](size)
+ val arr = new Array[Any](PriorityQueue.this.size)
iterator.copyToArray(arr)
var i = arr.size - 1
def hasNext: Boolean = i >= 0
@@ -267,9 +268,9 @@ class PriorityQueue[A](implicit ord: Ordering[A])
// }
}
-
-
-
-
-
-
+// !!! TODO - but no SortedSeqFactory (yet?)
+// object PriorityQueue extends SeqFactory[PriorityQueue] {
+// def empty[A](implicit ord: Ordering[A]): PriorityQueue[A] = new PriorityQueue[A](ord)
+// implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, PriorityQueue] =
+// }
+//
\ No newline at end of file
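A minimal sketch (Scala 2.8) of the reshuffled PriorityQueue API: += and ++= mutate in place, while the deprecated + now clones first, matching the message above. The companion apply is still commented out, so the queue is built explicitly:

    import scala.collection.mutable.PriorityQueue

    val pq = new PriorityQueue[Int]
    pq ++= List(3, 1, 2)        // in place; any TraversableOnce is accepted
    val pq2 = pq + 7            // deprecated: clones, then adds; pq itself is unchanged
    val largest = pq.dequeue()  // 3, the highest-priority element under the default ordering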
diff --git a/src/library/scala/collection/mutable/PriorityQueueProxy.scala b/src/library/scala/collection/mutable/PriorityQueueProxy.scala
index 0771ce6b60..427ffe478a 100644
--- a/src/library/scala/collection/mutable/PriorityQueueProxy.scala
+++ b/src/library/scala/collection/mutable/PriorityQueueProxy.scala
@@ -48,21 +48,11 @@ abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends Priority
*/
override def +=(elem: A): this.type = { self += elem; this }
- /** Adds all elements provided by an <code>Iterable</code> object
- * into the priority queue.
- *
- * @param iter an iterable object
- */
- def ++=(iter: scala.collection.Iterable[A]): this.type = {
- self ++= iter
- this
- }
-
/** Adds all elements provided by an iterator into the priority queue.
*
* @param it an iterator
*/
- override def ++=(it: Iterator[A]): this.type = {
+ override def ++=(it: TraversableOnce[A]): this.type = {
self ++= it
this
}
diff --git a/src/library/scala/collection/mutable/Publisher.scala b/src/library/scala/collection/mutable/Publisher.scala
index 4f675eff9f..58e4394ef7 100644
--- a/src/library/scala/collection/mutable/Publisher.scala
+++ b/src/library/scala/collection/mutable/Publisher.scala
@@ -47,7 +47,7 @@ trait Publisher[Evt] {
def removeSubscriptions() { filters.clear }
protected def publish(event: Evt) {
- filters.keysIterator.foreach(sub =>
+ filters.keys.foreach(sub =>
if (!suspended.contains(sub) &&
filters.entryExists(sub, p => p(event)))
sub.notify(self, event)
diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala
index 3b09ceba91..3754dbc3f2 100644
--- a/src/library/scala/collection/mutable/Queue.scala
+++ b/src/library/scala/collection/mutable/Queue.scala
@@ -24,7 +24,6 @@ import generic._
*/
@serializable @cloneable
class Queue[A] extends MutableList[A] with Cloneable[Queue[A]] {
-
/** Adds all elements to the queue.
*
* @param elems the elements to add.
@@ -144,3 +143,8 @@ class Queue[A] extends MutableList[A] with Cloneable[Queue[A]] {
*/
def front: A = first0.elem
}
+
+// !!! TODO - integrate
+object Queue {
+ def apply[A](xs: A*): Queue[A] = new Queue[A] ++= xs
+}
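With the companion object above in place, a queue can be built directly from its elements; a minimal sketch (Scala 2.8):

    import scala.collection.mutable.Queue

    val q = Queue(1, 2, 3)      // the new apply, i.e. new Queue[Int] ++= Seq(1, 2, 3)
    q.enqueue(4)
    val first = q.dequeue()     // 1: elements leave in FIFO order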
diff --git a/src/library/scala/collection/mutable/QueueProxy.scala b/src/library/scala/collection/mutable/QueueProxy.scala
index a322934f63..b2548b26cc 100644
--- a/src/library/scala/collection/mutable/QueueProxy.scala
+++ b/src/library/scala/collection/mutable/QueueProxy.scala
@@ -45,24 +45,13 @@ trait QueueProxy[A] extends Queue[A] with Proxy {
*/
override def +=(elem: A): this.type = { self += elem; this }
- /** Adds all elements provided by an <code>Iterable</code> object
- * at the end of the queue. The elements are prepended in the order they
- * are given out by the iterator.
- *
- * @param iter an iterable object
- */
- def ++=(iter: scala.collection.Iterable[A]): this.type = {
- self ++= iter
- this
- }
-
/** Adds all elements provided by an iterator
* at the end of the queue. The elements are prepended in the order they
* are given out by the iterator.
*
* @param iter an iterator
*/
- override def ++=(it: Iterator[A]): this.type = {
+ override def ++=(it: TraversableOnce[A]): this.type = {
self ++= it
this
}
diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala
index 435eb3ee0f..80ab1cd559 100644
--- a/src/library/scala/collection/mutable/ResizableArray.scala
+++ b/src/library/scala/collection/mutable/ResizableArray.scala
@@ -24,7 +24,7 @@ import generic._
*/
trait ResizableArray[A] extends IndexedSeq[A]
with GenericTraversableTemplate[A, ResizableArray]
- with IndexedSeqLike[A, ResizableArray[A]] {
+ with IndexedSeqOptimized[A, ResizableArray[A]] {
override def companion: GenericCompanion[ResizableArray] = ResizableArray
diff --git a/src/library/scala/collection/mutable/SetBuilder.scala b/src/library/scala/collection/mutable/SetBuilder.scala
index 6286c46ac4..450d76463c 100644
--- a/src/library/scala/collection/mutable/SetBuilder.scala
+++ b/src/library/scala/collection/mutable/SetBuilder.scala
@@ -13,17 +13,10 @@ package mutable
import generic._
-/** The canonical builder for collections that are addable, i.e. that support
- * an efficient + method which adds an element to the collection.
- * Collections are built from their empty element using this + method.
- * @param empty The empty element of the collection.
+/** The canonical builder for mutable Sets.
*
+ * @param empty The empty element of the collection.
* @since 2.8
*/
-class SetBuilder[A, Coll <: Addable[A, Coll] with scala.collection.Iterable[A] with scala.collection.IterableLike[A, Coll]](empty: Coll)
-extends Builder[A, Coll] {
- protected var elems: Coll = empty
- def +=(x: A): this.type = { elems = elems + x; this }
- def clear() { elems = empty }
- def result: Coll = elems
-}
+class SetBuilder[A, Coll <: Addable[A, Coll] with collection.Iterable[A] with collection.IterableLike[A, Coll]](empty: Coll)
+extends AddingBuilder[A, Coll](empty) { }
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index cb6eb293c1..7004e52b8e 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -14,6 +14,7 @@ package mutable
import generic._
import script._
+import scala.annotation.migration
/** A template trait for mutable sets of type `mutable.Set[A]`.
* @tparam A the type of the elements of the set
@@ -63,6 +64,9 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
*/
override protected[this] def newBuilder: Builder[A, This] = empty
+ @migration(2, 8, "Set.map now returns a Set, so it will discard duplicate values.")
+ override def map[B, That](f: A => B)(implicit bf: CanBuildFrom[This, B, That]): That = super.map(f)(bf)
+
/** Adds an element to this $coll.
*
* @param elem the element to be added
@@ -119,7 +123,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
*/
def clear() { foreach(-=) }
- override def clone(): mutable.Set[A] = empty ++= repr
+ override def clone(): This = empty ++= repr
/** The result when this set is used as a builder
* @return the set representation itself.
@@ -131,9 +135,11 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
*
* @param elem the element to add.
*/
- @deprecated("Use += instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() +=' if you intend to create a new collection.")
- override def + (elem: A): This = { +=(elem); repr }
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new set. To add an element as a\n"+
+ "side effect to an existing set and return that set itself, use +=."
+ )
+ override def + (elem: A): This = clone() += elem
/** Adds two or more elements to this collection and returns
* the collection itself.
@@ -142,45 +148,34 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
* @param elem2 the second element to add.
* @param elems the remaining elements to add.
*/
- @deprecated("Use += instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() +=' if you intend to create a new collection.")
- override def + (elem1: A, elem2: A, elems: A*): This = {
- this += elem1 += elem2 ++= elems
- repr
- }
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new set. To add the elements as a\n"+
+ "side effect to an existing set and return that set itself, use +=."
+ )
+ override def + (elem1: A, elem2: A, elems: A*): This =
+ clone() += elem1 += elem2 ++= elems
/** Adds a number of elements provided by a traversable object and returns
* either the collection itself.
*
* @param iter the iterable object.
*/
- @deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=' if you intend to create a new collection.")
- override def ++(iter: scala.collection.Traversable[A]): This = {
- for (elem <- iter) +=(elem)
- repr
- }
-
- /** Adds a number of elements provided by an iterator and returns
- * the collection itself.
- *
- * @param iter the iterator
- */
- @deprecated("Use ++= instead if you intend to add by side effect to an existing collection.\n"+
- "Use `clone() ++=' if you intend to create a new collection.")
- override def ++ (iter: Iterator[A]): This = {
- for (elem <- iter) +=(elem)
- repr
- }
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new set. To add the elements as a\n"+
+ "side effect to an existing set and return that set itself, use ++=."
+ )
+ override def ++(xs: TraversableOnce[A]): This = clone() ++= xs
/** Removes a single element from this collection and returns
* the collection itself.
*
* @param elem the element to remove.
*/
- @deprecated("Use -= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() -=' if you intend to create a new collection.")
- override def -(elem: A): This = { -=(elem); repr }
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new set. To remove the element as a\n"+
+ "side effect to an existing set and return that set itself, use -=."
+ )
+ override def -(elem: A): This = clone() -= elem
/** Removes two or more elements from this collection and returns
* the collection itself.
@@ -189,36 +184,23 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
* @param elem2 the second element to remove.
* @param elems the remaining elements to remove.
*/
- @deprecated("Use -= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() -=' if you intend to create a new collection.")
- override def -(elem1: A, elem2: A, elems: A*): This = {
- this -= elem1 -= elem2 --= elems
- repr
- }
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new set. To remove the elements as a\n"+
+ "side effect to an existing set and return that set itself, use -=."
+ )
+ override def -(elem1: A, elem2: A, elems: A*): This =
+ clone() -= elem1 -= elem2 --= elems
/** Removes a number of elements provided by a Traversable object and returns
* the collection itself.
*
* @param iter the Traversable object.
*/
- @deprecated("Use --= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() --=' if you intend to create a new collection.")
- override def --(iter: scala.collection.Traversable[A]): This = {
- for (elem <- iter) -=(elem)
- repr
- }
-
- /** Removes a number of elements provided by an iterator and returns
- * the collection itself.
- *
- * @param iter the iterator
- */
- @deprecated("Use --= instead if you intend to remove by side effect from an existing collection.\n"+
- "Use `clone() --=' if you intend to create a new collection.")
- override def --(iter: Iterator[A]): This = {
- for (elem <- iter) -=(elem)
- repr
- }
+ @migration(2, 8,
+ "As of 2.8, this operation creates a new set. To remove the elements as a\n"+
+ "side effect to an existing set and return that set itself, use --=."
+ )
+ override def --(xs: TraversableOnce[A]): This = clone() --= xs
/** Send a message to this scriptable object.
*
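A minimal sketch (Scala 2.8, with HashSet behind mutable.Set) of the new semantics: + builds a new set, += mutates, and map now goes through a Set builder, so duplicates collapse exactly as the @migration note above warns:

    import scala.collection.mutable

    val s = mutable.Set(1, 2, 3)
    val t = s + 4                 // new set; s still has three elements
    s += 4                        // in-place addition
    val parities = s map (_ % 2)  // Set(0, 1): the result is a Set, so duplicates are discarded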
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index bbb4189dc3..45e9fa24b2 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -15,6 +15,7 @@ package mutable
import generic._
import collection.immutable.{List, Nil}
import collection.Iterator
+import annotation.migration
/** A stack implements a data structure which allows to store and retrieve
* objects in a last-in-first-out (LIFO) fashion.
@@ -63,19 +64,11 @@ class Stack[A] private (var elems: List[A]) extends scala.collection.Seq[A] with
* @param elems the iterator object.
* @return the stack with the new elements on top.
*/
- def pushAll(elems: Iterator[A]): this.type = { for (elem <- elems) { push(elem); () }; this }
+ def pushAll(xs: TraversableOnce[A]): this.type = { xs foreach push ; this }
- /** Push all elements provided by the given iterable object onto
- * the stack. The last element returned by the traversable object
- * will be on top of the new stack.
- *
- * @param elems the iterable object.
- * @return the stack with the new elements on top.
- */
- def pushAll(elems: scala.collection.Traversable[A]): this.type = { for (elem <- elems) { push(elem); () }; this }
-
- @deprecated("use pushAll") def ++=(it: Iterator[A]): this.type = pushAll(it)
- @deprecated("use pushAll") def ++=(it: scala.collection.Iterable[A]): this.type = pushAll(it)
+ @deprecated("use pushAll")
+ @migration(2, 8, "Stack ++= now pushes arguments on the stack from left to right.")
+ def ++=(xs: TraversableOnce[A]): this.type = pushAll(xs)
/** Returns the top element of the stack. This method will not remove
* the element from the stack. An error is signaled if there is no
@@ -112,17 +105,27 @@ class Stack[A] private (var elems: List[A]) extends scala.collection.Seq[A] with
*
* @return an iterator over all stack elements.
*/
+ @migration(2, 8, "Stack iterator and foreach now traverse in FIFO order.")
override def iterator: Iterator[A] = elems.iterator
/** Creates a list of all stack elements in LIFO order.
*
* @return the created list.
*/
+ @migration(2, 8, "Stack iterator and foreach now traverse in FIFO order.")
override def toList: List[A] = elems
+ @migration(2, 8, "Stack iterator and foreach now traverse in FIFO order.")
+ override def foreach[U](f: A => U): Unit = super.foreach(f)
+
/** This method clones the stack.
*
* @return a stack with the same elements.
*/
override def clone(): Stack[A] = new Stack[A](elems)
}
+
+// !!! TODO - integrate
+object Stack {
+ def apply[A](xs: A*): Stack[A] = new Stack[A] ++= xs
+}
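A minimal sketch (Scala 2.8) of pushAll and of the companion apply added above; since pushAll is xs foreach push, the rightmost argument ends up on top:

    import scala.collection.mutable.Stack

    val st = Stack(1, 2)     // new Stack[Int] ++= Seq(1, 2), pushed left to right
    st pushAll List(3, 4)
    val top = st.top         // 4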
diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala
index 99f556112a..d3810dd158 100644
--- a/src/library/scala/collection/mutable/StackProxy.scala
+++ b/src/library/scala/collection/mutable/StackProxy.scala
@@ -48,15 +48,7 @@ trait StackProxy[A] extends Stack[A] with Proxy {
this
}
- override def pushAll(elems: Iterator[A]): this.type = {
- self pushAll elems
- this
- }
-
- override def pushAll(elems: scala.collection.Traversable[A]): this.type = {
- self pushAll elems
- this
- }
+ override def pushAll(xs: TraversableOnce[A]): this.type = { self pushAll xs; this }
/** Pushes all elements provided by an <code>Iterable</code> object
* on top of the stack. The elements are pushed in the order they
@@ -64,21 +56,8 @@ trait StackProxy[A] extends Stack[A] with Proxy {
*
* @param iter an iterable object
*/
- @deprecated("use pushAll") override def ++=(iter: scala.collection.Iterable[A]): this.type = {
- self ++= iter
- this
- }
+ @deprecated("use pushAll") override def ++=(xs: TraversableOnce[A]): this.type = { self ++= xs ; this }
- /** Pushes all elements provided by an iterator
- * on top of the stack. The elements are pushed in the order they
- * are given out by the iterator.
- *
- * @param iter an iterator
- */
- @deprecated("use pushAll") override def ++=(it: Iterator[A]): this.type = {
- self ++= it
- this
- }
override def push(elem1: A, elem2: A, elems: A*): this.type = {
self.push(elem1).push(elem2).pushAll(elems)
diff --git a/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
index f6caa57729..1c9a77c46a 100644
--- a/src/library/scala/collection/mutable/SynchronizedBuffer.scala
+++ b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
@@ -60,8 +60,8 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
*
* @param iter the iterable object.
*/
- override def ++(iter: Traversable[A]): Self = synchronized {
- super.++(iter)
+ override def ++(xs: TraversableOnce[A]): Self = synchronized {
+ super.++(xs)
}
/** Appends a number of elements provided by an iterable object
@@ -69,8 +69,8 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
*
* @param iter the iterable object.
*/
- override def ++=(iter: Traversable[A]): this.type = synchronized[this.type] {
- super.++=(iter)
+ override def ++=(xs: TraversableOnce[A]): this.type = synchronized[this.type] {
+ super.++=(xs)
}
/** Appends a sequence of elements to this buffer.
@@ -86,8 +86,8 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
*
* @param iter the iterable object.
*/
- override def appendAll(iter: Traversable[A]): Unit = synchronized {
- super.appendAll(iter)
+ override def appendAll(xs: TraversableOnce[A]): Unit = synchronized {
+ super.appendAll(xs)
}
/** Prepend a single element to this buffer and return
@@ -105,17 +105,13 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
*
* @param iter the iterable object.
*/
- override def ++=:(iter: Traversable[A]): this.type = synchronized[this.type] {
- super.++=:(iter)
- }
+ override def ++=:(xs: TraversableOnce[A]): this.type = synchronized[this.type] { super.++=:(xs) }
/** Prepend an element to this list.
*
* @param elem the element to prepend.
*/
- override def prepend(elems: A*): Unit = synchronized {
- super.prependAll(elems)
- }
+ override def prepend(elems: A*): Unit = prependAll(elems)
/** Prepends a number of elements provided by an iterable object
* via its <code>iterator</code> method. The identity of the
@@ -123,8 +119,8 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
*
* @param iter the iterable object.
*/
- override def prependAll(elems: Traversable[A]): Unit = synchronized {
- super.prependAll(elems)
+ override def prependAll(xs: TraversableOnce[A]): Unit = synchronized {
+ super.prependAll(xs)
}
/** Inserts new elements at the index <code>n</code>. Opposed to method
diff --git a/src/library/scala/collection/mutable/SynchronizedMap.scala b/src/library/scala/collection/mutable/SynchronizedMap.scala
index ca29fa20b8..dabcaa7e1c 100644
--- a/src/library/scala/collection/mutable/SynchronizedMap.scala
+++ b/src/library/scala/collection/mutable/SynchronizedMap.scala
@@ -12,6 +12,7 @@
package scala.collection
package mutable
+import annotation.migration
/** This class should be used as a mixin. It synchronizes the <code>Map</code>
* functions of the class into which it is mixed in.
@@ -35,20 +36,21 @@ trait SynchronizedMap[A, B] extends Map[A, B] {
override def getOrElseUpdate(key: A, default: => B): B = synchronized { super.getOrElseUpdate(key, default) }
override def transform(f: (A, B) => B): this.type = synchronized[this.type] { super.transform(f) }
override def retain(p: (A, B) => Boolean): this.type = synchronized[this.type] { super.retain(p) }
- override def valuesIterable: scala.collection.Iterable[B] = synchronized { super.valuesIterable }
- @deprecated("Use `valuesIterator' instead") override def values: Iterator[B] = synchronized { super.valuesIterator }
+ @migration(2, 8, "As of 2.8, values returns Iterable[B] rather than Iterator[B].")
+ override def values: collection.Iterable[B] = synchronized { super.values }
override def valuesIterator: Iterator[B] = synchronized { super.valuesIterator }
override def clone(): Self = synchronized { super.clone() }
override def foreach[U](f: ((A, B)) => U) = synchronized { super.foreach(f) }
override def apply(key: A): B = synchronized { super.apply(key) }
- override def keySet: scala.collection.Set[A] = synchronized { super.keySet }
- @deprecated("Use `keysIterator' instead") override def keys: Iterator[A] = synchronized { super.keysIterator }
+ override def keySet: collection.Set[A] = synchronized { super.keySet }
+ @migration(2, 8, "As of 2.8, keys returns Iterable[A] rather than Iterator[A].")
+ override def keys: collection.Iterable[A] = synchronized { super.keys }
override def keysIterator: Iterator[A] = synchronized { super.keysIterator }
override def isEmpty: Boolean = synchronized { super.isEmpty }
override def contains(key: A): Boolean = synchronized {super.contains(key) }
override def isDefinedAt(key: A) = synchronized { super.isDefinedAt(key) }
- @deprecated("See Map.+ for explanation") override def +(kv: (A, B)): this.type = synchronized[this.type] { super.+(kv) }
+ // @deprecated("See Map.+ for explanation") override def +(kv: (A, B)): this.type = synchronized[this.type] { super.+(kv) }
// can't override -, -- same type!
// @deprecated override def -(key: A): Self = synchronized { super.-(key) }
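A minimal sketch (Scala 2.8) of using the mixin, and of the 2.8 change that keys and values now return Iterables rather than Iterators:

    import scala.collection.mutable

    val shared = new mutable.HashMap[String, Int] with mutable.SynchronizedMap[String, Int]
    shared("hits") = 1
    shared.getOrElseUpdate("misses", 0)     // runs inside synchronized
    val ks: Iterable[String] = shared.keys  // an Iterable in 2.8, no longer an Iterator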
diff --git a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
index 066c96a651..9d18846252 100644
--- a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
+++ b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
@@ -39,23 +39,11 @@ class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQu
this
}
- /** Adds all elements provided by an <code>Iterable</code> object
- * into the priority queue.
- *
- * @param iter an iterable object
- */
- def ++=(iter: scala.collection.Iterable[A]): this.type = {
- synchronized {
- super.++=(iter)
- }
- this
- }
-
/** Adds all elements provided by an iterator into the priority queue.
*
* @param it an iterator
*/
- override def ++=(it: Iterator[A]): this.type = {
+ override def ++=(it: TraversableOnce[A]): this.type = {
synchronized {
super.++=(it)
}
@@ -87,7 +75,7 @@ class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQu
*/
override def clear(): Unit = synchronized { super.clear }
- /** Returns an iterator which yiels all the elements of the priority
+ /** Returns an iterator which yields all the elements of the priority
* queue in descending priority order.
*
* @return an iterator over all elements sorted in descending order.
diff --git a/src/library/scala/collection/mutable/SynchronizedQueue.scala b/src/library/scala/collection/mutable/SynchronizedQueue.scala
index 3a1bc2e383..e7630cee06 100644
--- a/src/library/scala/collection/mutable/SynchronizedQueue.scala
+++ b/src/library/scala/collection/mutable/SynchronizedQueue.scala
@@ -42,15 +42,7 @@ class SynchronizedQueue[A] extends Queue[A] {
*
* @param iter an iterable object
*/
- override def ++=(iter: Traversable[A]): this.type = synchronized[this.type] { super.++=(iter) }
-
- /** Adds all elements provided by an iterator
- * at the end of the queue. The elements are prepended in the order they
- * are given out by the iterator.
- *
- * @param it an iterator
- */
- override def ++=(it: Iterator[A]): this.type = synchronized[this.type] { super.++=(it) }
+ override def ++=(xs: TraversableOnce[A]): this.type = synchronized[this.type] { super.++=(xs) }
/** Adds all elements to the queue.
*
diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala
index a4832ba9f4..d3023b9136 100644
--- a/src/library/scala/collection/mutable/SynchronizedSet.scala
+++ b/src/library/scala/collection/mutable/SynchronizedSet.scala
@@ -39,24 +39,16 @@ trait SynchronizedSet[A] extends Set[A] {
super.+=(elem)
}
- override def ++=(that: Traversable[A]): this.type = synchronized[this.type] {
- super.++=(that)
- }
-
- override def ++=(it: Iterator[A]): this.type = synchronized[this.type] {
- super.++=(it)
+ override def ++=(xs: TraversableOnce[A]): this.type = synchronized[this.type] {
+ super.++=(xs)
}
abstract override def -=(elem: A): this.type = synchronized[this.type] {
super.-=(elem)
}
- override def --=(that: Traversable[A]): this.type = synchronized[this.type] {
- super.--=(that)
- }
-
- override def --=(it: Iterator[A]): this.type = synchronized[this.type] {
- super.--=(it)
+ override def --=(xs: TraversableOnce[A]): this.type = synchronized[this.type] {
+ super.--=(xs)
}
override def update(elem: A, included: Boolean): Unit = synchronized {
@@ -103,7 +95,7 @@ trait SynchronizedSet[A] extends Set[A] {
super.<<(cmd)
}
- override def clone(): Set[A] = synchronized {
+ override def clone(): Self = synchronized {
super.clone()
}
}
diff --git a/src/library/scala/collection/mutable/SynchronizedStack.scala b/src/library/scala/collection/mutable/SynchronizedStack.scala
index ff2f986244..4394d307eb 100644
--- a/src/library/scala/collection/mutable/SynchronizedStack.scala
+++ b/src/library/scala/collection/mutable/SynchronizedStack.scala
@@ -44,21 +44,13 @@ class SynchronizedStack[A] extends Stack[A] {
*/
override def push(elem1: A, elem2: A, elems: A*): this.type = synchronized[this.type] { super.push(elem1, elem2, elems: _*) }
- /** Pushes all elements provided by an <code>Traversable</code> object
- * on top of the stack. The elements are pushed in the order they
- * are given out by the iterator.
- *
- * @param iter an iterable object
- */
- override def pushAll(elems: scala.collection.Traversable[A]): this.type = synchronized[this.type] { super.pushAll(elems) }
-
/** Pushes all elements provided by an iterator
* on top of the stack. The elements are pushed in the order they
* are given out by the iterator.
*
* @param elems an iterator
*/
- override def pushAll(elems: Iterator[A]): this.type = synchronized[this.type] { super.pushAll(elems) }
+ override def pushAll(xs: TraversableOnce[A]): this.type = synchronized[this.type] { super.pushAll(xs) }
/** Returns the top element of the stack. This method will not remove
* the element from the stack. An error is signaled if there is no
diff --git a/src/library/scala/collection/mutable/WeakHashMap.scala b/src/library/scala/collection/mutable/WeakHashMap.scala
index 81c91dec3d..cad4dc2e43 100644
--- a/src/library/scala/collection/mutable/WeakHashMap.scala
+++ b/src/library/scala/collection/mutable/WeakHashMap.scala
@@ -13,10 +13,19 @@ package scala.collection
package mutable
import JavaConversions._
+import generic._
+
/**
* @since 2.8
*/
-class WeakHashMap[A, B] extends JMapWrapper[A, B](new java.util.WeakHashMap) {
+class WeakHashMap[A, B] extends JMapWrapper[A, B](new java.util.WeakHashMap)
+ with JMapWrapperLike[A, B, WeakHashMap[A, B]] {
override def empty = new WeakHashMap[A, B]
}
+
+object WeakHashMap extends MutableMapFactory[WeakHashMap] {
+ implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), WeakHashMap[A, B]] = new MapCanBuildFrom[A, B]
+ def empty[A, B]: WeakHashMap[A, B] = new WeakHashMap[A, B]
+}
+
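
With the new companion object, WeakHashMap takes part in the usual factory and builder machinery; a sketch, assuming the standard MutableMapFactory.apply varargs constructor:

    import scala.collection.mutable.WeakHashMap

    val cache = WeakHashMap("a" -> 1, "b" -> 2)        // built via the new companion
    // transformers can now keep the WeakHashMap type through the new CanBuildFrom
    val doubled = cache map { case (k, v) => (k, v * 2) }   // stays a WeakHashMap[String, Int]
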
diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala
index 6652f5e40a..10117a1086 100644
--- a/src/library/scala/collection/mutable/WrappedArray.scala
+++ b/src/library/scala/collection/mutable/WrappedArray.scala
@@ -41,6 +41,13 @@ abstract class WrappedArray[T] extends IndexedSeq[T] with ArrayLike[T, WrappedAr
/** The underlying array */
def array: Array[T]
+
+ override def toArray[U >: T : ClassManifest]: Array[U] =
+ if (implicitly[ClassManifest[U]].erasure eq array.getClass.getComponentType)
+ array.asInstanceOf[Array[U]]
+ else
+ super.toArray[U]
+
override def stringPrefix = "WrappedArray"
/** Clones this object, including the underlying Array. */
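
A sketch of the fast path added above: when the requested element type matches the array's component type, toArray can hand back the underlying array without copying.

    import scala.collection.mutable.WrappedArray

    val arr = Array(1, 2, 3)
    val wrapped: WrappedArray[Int] = arr        // via the Predef wrapIntArray conversion
    assert(wrapped.toArray[Int] eq arr)         // same underlying array, no copy
    assert(!(wrapped.toArray[Any] eq arr))      // widening the element type still copies
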
diff --git a/src/library/scala/collection/readme-if-you-want-to-add-something.txt b/src/library/scala/collection/readme-if-you-want-to-add-something.txt
new file mode 100755
index 0000000000..6700cb7b68
--- /dev/null
+++ b/src/library/scala/collection/readme-if-you-want-to-add-something.txt
@@ -0,0 +1,50 @@
+Conventions for Collection Implementors
+
+Martin Odersky
+19 Mar 2010
+
+This note describes some conventions which must be followed to keep
+the collection libraries consistent.
+
+In the following we distinguish between two kinds of methods:
+
+ - ``Accessors'' access some of the elements of a collection, but return a result which
+ is unrelated to the collection.
+ Examples of accessors are: head, foldLeft, indexWhere, toSeq.
+
+ - ``Transformers'' access elements of a collection and produce a new collection of related
+ type as a result. The relation might either be direct (same type as receiver)
+ or indirect, linked by a CanBuildFrom implicit.
+ Examples of transformers are: filter, map, groupBy, zip.
+
+1. Proxies
+
+Every collection type has a Proxy class that forwards all operations to
+an underlying collection. Proxy methods are all implemented in classes
+with names ending in `ProxyLike'. If you add a new method to a collection
+class, you need to add the same method to the corresponding ProxyLike class.
+
+2. Forwarders
+
+Classes Traversable, Iterable, and Seq also have forwarders, which
+forward all collection-specific accessor operations to an underlying
+collection. These are defined as classes with names ending
+in `Forwarder' in package collection.generic. If you add a new
+accessor method to a Seq or one of its collection superclasses, you
+need to add the same method to the corresponding forwarder class.
+
+3. Views
+
+Classes Traversable, Iterable, Seq, IndexedSeq, and mutable.IndexedSeq
+support views. Their operations are all defined in classes with names
+ending in `ViewLike'. If you add a new transformer method to one of
+the above collection classes, you need to add the same method to the
+corresponding view class. Failure to do so will cause the
+corresponding method to fail at runtime with an exception like
+UnsupportedOperationException("coll.newBuilder"). If there is no good
+way to implement the operation in question lazily, there's a fallback
+using the newForced method. See the definition of sorted in trait
+SeqViewLike as an example.
+
+
+
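
For illustration, on a plain List the accessor/transformer distinction described in the note plays out like this:

    val xs = List(1, 2, 3, 4)

    // accessors: the result is unrelated to the collection type
    xs.head                        // 1
    xs.foldLeft(0)(_ + _)          // 10
    xs.indexWhere(_ > 2)           // 2

    // transformers: the result is a related collection type
    xs filter (_ % 2 == 0)         // List(2, 4)                 -- same type as receiver
    xs map (_.toString)            // List("1", "2", "3", "4")   -- linked by a CanBuildFrom
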
diff --git a/src/library/scala/compat/Platform.scala b/src/library/scala/compat/Platform.scala
index f7f5070699..7580d2cc0e 100644
--- a/src/library/scala/compat/Platform.scala
+++ b/src/library/scala/compat/Platform.scala
@@ -47,7 +47,7 @@ object Platform {
@inline
def getClassForName(name: String): Class[_] = java.lang.Class.forName(name)
- val EOL = System.getProperty("line.separator", "\n")
+ val EOL = util.Properties.lineSeparator
@inline
def currentTime: Long = System.currentTimeMillis()
diff --git a/src/library/scala/concurrent/DelayedLazyVal.scala b/src/library/scala/concurrent/DelayedLazyVal.scala
index 092800cb10..7c5d43e70c 100644
--- a/src/library/scala/concurrent/DelayedLazyVal.scala
+++ b/src/library/scala/concurrent/DelayedLazyVal.scala
@@ -27,9 +27,15 @@ import ops.future
* @version 2.8
*/
class DelayedLazyVal[T](f: () => T, body: => Unit) {
- @volatile private[this] var isDone = false
+ @volatile private[this] var _isDone = false
private[this] lazy val complete = f()
+ /** Whether the computation is complete.
+ *
+ * @return true if the computation is complete.
+ */
+ def isDone = _isDone
+
/** The current result of f(), or the final result if complete.
*
* @return the current value
@@ -38,6 +44,6 @@ class DelayedLazyVal[T](f: () => T, body: => Unit) {
future {
body
- isDone = true
+ _isDone = true
}
}
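
A small sketch of the intended use of the new isDone accessor (it assumes the class's existing apply() accessor, which is not shown in this hunk):

    import scala.concurrent.DelayedLazyVal
    import scala.collection.mutable.ArrayBuffer

    val buf = new ArrayBuffer[Int]
    // the body runs in a future; f() snapshots whatever has been produced so far
    val partial = new DelayedLazyVal(() => buf.toList, { for (i <- 1 to 5) buf += i })

    while (!partial.isDone) Thread.sleep(10)
    println(partial())             // List(1, 2, 3, 4, 5) once isDone is true
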
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index 6bd6b33484..bb6965fcdc 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -29,6 +29,8 @@ object BigDecimal
private val minCached = -512
private val maxCached = 512
+
+ /** Cache, used only for the defaultMathContext, of BigDecimals in a small range. */
private lazy val cache = new Array[BigDecimal](maxCached - minCached + 1)
val defaultMathContext = MathContext.UNLIMITED
@@ -50,12 +52,13 @@ object BigDecimal
*/
def apply(i: Int): BigDecimal = apply(i, defaultMathContext)
def apply(i: Int, mc: MathContext): BigDecimal =
- if (minCached <= i && i <= maxCached) {
+ if (mc == defaultMathContext && minCached <= i && i <= maxCached) {
val offset = i - minCached
var n = cache(offset)
if (n eq null) { n = new BigDecimal(BigDec.valueOf(i), mc); cache(offset) = n }
n
- } else new BigDecimal(BigDec.valueOf(i), mc)
+ }
+ else new BigDecimal(BigDec.valueOf(i), mc)
/** Constructs a <code>BigDecimal</code> whose value is equal to that of the
* specified long value.
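
The effect of the extra mc == defaultMathContext guard, sketched (the behaviour follows directly from the code above):

    import java.math.MathContext

    val a = BigDecimal(1)                          // small Int, default context: cached
    val b = BigDecimal(1)
    val c = BigDecimal(1, MathContext.DECIMAL64)   // non-default context: built fresh
    assert(a eq b)                                 // same cached instance
    assert(!(c eq a))                              // the cache no longer leaks across contexts
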
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index 4c9f970cb4..5267ad8b95 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -102,7 +102,7 @@ object BigInt {
*/
implicit def int2bigInt(i: Int): BigInt = apply(i)
- /** Implicit copnversion from long to BigInt
+ /** Implicit conversion from long to BigInt
*/
implicit def long2bigInt(l: Long): BigInt = apply(l)
}
diff --git a/src/library/scala/math/Numeric.scala b/src/library/scala/math/Numeric.scala
index fc8e7c307d..65f213a08e 100644
--- a/src/library/scala/math/Numeric.scala
+++ b/src/library/scala/math/Numeric.scala
@@ -75,6 +75,21 @@ object Numeric {
}
implicit object ByteIsIntegral extends ByteIsIntegral with Ordering.ByteOrdering
+ trait CharIsIntegral extends Integral[Char] {
+ def plus(x: Char, y: Char): Char = (x + y).toChar
+ def minus(x: Char, y: Char): Char = (x - y).toChar
+ def times(x: Char, y: Char): Char = (x * y).toChar
+ def quot(x: Char, y: Char): Char = (x / y).toChar
+ def rem(x: Char, y: Char): Char = (x % y).toChar
+ def negate(x: Char): Char = (-x).toChar
+ def fromInt(x: Int): Char = x.toChar
+ def toInt(x: Char): Int = x.toInt
+ def toLong(x: Char): Long = x.toLong
+ def toFloat(x: Char): Float = x.toFloat
+ def toDouble(x: Char): Double = x.toDouble
+ }
+ implicit object CharIsIntegral extends CharIsIntegral with Ordering.CharOrdering
+
trait LongIsIntegral extends Integral[Long] {
def plus(x: Long, y: Long): Long = x + y
def minus(x: Long, y: Long): Long = x - y
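
With CharIsIntegral in implicit scope, Char now works with generic numeric code; a sketch:

    val chars = List('a', '\u0001', '\u0001')
    val shifted: Char = chars.sum                  // 'c', resolving Numeric[Char] to CharIsIntegral

    // generic code written against Integral picks it up as well
    def avg[T: Integral](xs: Seq[T]): T = {
      val I = implicitly[Integral[T]]; import I._
      xs.sum / fromInt(xs.size)
    }
    avg(Seq('a', 'c'))                             // 'b'
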
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index 1660cdb99e..04c2d96aba 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -133,6 +133,8 @@ object Ordering extends LowPriorityOrderingImplicits {
override def lteq(x: T, y: T): Boolean = !cmp(y, x)
}
+ def by[T, S: Ordering](f: T => S): Ordering[T] = fromLessThan((x, y) => implicitly[Ordering[S]].lt(f(x), f(y)))
+
trait UnitOrdering extends Ordering[Unit] {
def compare(x: Unit, y: Unit) = 0
}
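
The new by combinator derives an Ordering from a key function; for instance (the Account class is purely illustrative):

    case class Account(owner: String, balance: Int)

    val byBalance = Ordering.by[Account, Int](_.balance)
    val accounts  = List(Account("b", 30), Account("a", 10))
    accounts.sorted(byBalance)                              // List(Account(a,10), Account(b,30))
    accounts.min(Ordering.by[Account, String](_.owner))     // Account(a,10)
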
diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala
index 9fa09e3b72..9f31623bdf 100644
--- a/src/library/scala/package.scala
+++ b/src/library/scala/package.scala
@@ -26,6 +26,8 @@ package object scala {
type NumberFormatException = java.lang.NumberFormatException
type AbstractMethodError = java.lang.AbstractMethodError
+ type TraversableOnce[+A] = scala.collection.TraversableOnce[A]
+
type Traversable[+A] = scala.collection.Traversable[A]
val Traversable = scala.collection.Traversable
diff --git a/src/library/scala/reflect/ClassManifest.scala b/src/library/scala/reflect/ClassManifest.scala
index 58f3c89499..ded013a4b5 100644
--- a/src/library/scala/reflect/ClassManifest.scala
+++ b/src/library/scala/reflect/ClassManifest.scala
@@ -27,7 +27,7 @@ import scala.collection.mutable.{WrappedArray, ArrayBuilder}
* </p>
*/
@serializable
-trait ClassManifest[T] extends OptManifest[T] {
+trait ClassManifest[T] extends OptManifest[T] with Equals {
/** A class representing the type U to which T would be erased. Note
* that there is no subtyping relationship between T and U. */
@@ -73,15 +73,20 @@ trait ClassManifest[T] extends OptManifest[T] {
def >:>(that: ClassManifest[_]): Boolean =
that <:< this
+ def canEqual(other: Any) = other match {
+ case _: ClassManifest[_] => true
+ case _ => false
+ }
+
/** Tests whether the type represented by this manifest is equal to the
* type represented by `that' manifest. BE AWARE: the current
* implementation is an approximation, as the test is done on the
* erasure of the type. */
override def equals(that: Any): Boolean = that match {
- case _: AnyValManifest[_] => false
- case m: ClassManifest[_] => this.erasure == m.erasure
+ case m: ClassManifest[_] if m canEqual this => this.erasure == m.erasure
case _ => false
}
+ override def hashCode = this.erasure.hashCode
protected def arrayClass[T](tp: Predef.Class[_]): Predef.Class[Array[T]] =
java.lang.reflect.Array.newInstance(tp, 0).getClass.asInstanceOf[Predef.Class[Array[T]]]
@@ -225,11 +230,4 @@ object ClassManifest {
override val typeArguments = args.toList
override def toString = prefix.toString+"#"+name+argString
}
-
- /** ClassManifest for the intersection type `parents_0 with ... with parents_n'. */
- def intersectionType[T](parents: ClassManifest[_]*): ClassManifest[T] =
- new (ClassManifest[T] @serializable) {
- def erasure = parents.head.erasure
- override def toString = parents.mkString(" with ")
- }
}
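
With canEqual and hashCode in place, class manifests behave as proper hash keys; a sketch, assuming the existing fromClass factory in the companion:

    import scala.reflect.{ ClassManifest, Manifest }

    val m1 = ClassManifest.fromClass(classOf[String])
    val m2 = ClassManifest.fromClass(classOf[String])
    assert(m1 == m2 && m1.hashCode == m2.hashCode)   // equal by erasure, consistent hash
    assert(m1 != Manifest.Int)                       // canEqual keeps AnyVal manifests apart
    Map(m1 -> "strings")(m2)                         // usable as a hash-map key: "strings"
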
diff --git a/src/library/scala/reflect/Code.scala b/src/library/scala/reflect/Code.scala
index 71e148db81..61138f2495 100644
--- a/src/library/scala/reflect/Code.scala
+++ b/src/library/scala/reflect/Code.scala
@@ -12,7 +12,7 @@
package scala.reflect
/** This type is required by the compiler and <b>should not be used in client code</b>. */
-class Code[Type](val tree: Tree)
+class Code[T](val tree: Tree)
/** This type is required by the compiler and <b>should not be used in client code</b>. */
object Code {
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index 69842e1193..b7cb86e1bd 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -27,18 +27,33 @@ import scala.collection.immutable.{List, Nil}
* </p>
*/
@serializable
-trait Manifest[T] extends ClassManifest[T] {
+trait Manifest[T] extends ClassManifest[T] with Equals {
override def typeArguments: List[Manifest[_]] = List()
override def arrayManifest: Manifest[Array[T]] =
Manifest.classType[Array[T]](arrayClass[T](erasure))
+
+ override def canEqual(that: Any): Boolean = that match {
+ case _: Manifest[_] => true
+ case _ => false
+ }
+ override def equals(that: Any): Boolean = that match {
+ case m: Manifest[_] if m canEqual this => (this <:< m) && (m <:< this)
+ case _ => false
+ }
+ override def hashCode = this.erasure.hashCode
}
@serializable
-trait AnyValManifest[T] extends Manifest[T] {
+trait AnyValManifest[T] extends Manifest[T] with Equals {
import Manifest.{ Any, AnyVal }
override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any) || (that eq AnyVal)
+ override def canEqual(other: Any) = other match {
+ case _: AnyValManifest[_] => true
+ case _ => false
+ }
override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ override def hashCode = System.identityHashCode(this)
}
/** <ps>
@@ -137,6 +152,7 @@ object Manifest {
override def toString = "Any"
override def <:<(that: ClassManifest[_]): Boolean = (that eq this)
override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.Any
}
@@ -144,6 +160,7 @@ object Manifest {
override def toString = "Object"
override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.Object
}
@@ -151,6 +168,7 @@ object Manifest {
override def toString = "AnyVal"
override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any)
override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.AnyVal
}
@@ -159,6 +177,7 @@ object Manifest {
override def <:<(that: ClassManifest[_]): Boolean =
(that ne null) && (that ne Nothing) && !(that <:< AnyVal)
override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.Null
}
@@ -166,6 +185,7 @@ object Manifest {
override def toString = "Nothing"
override def <:<(that: ClassManifest[_]): Boolean = (that ne null)
override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef]
+ override def hashCode = System.identityHashCode(this)
private def readResolve(): Any = Manifest.Nothing
}
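
For the value-class manifests, equality stays reference-based and the new hashCode keeps the equals/hashCode contract; roughly:

    import scala.reflect.Manifest

    val im = manifest[Int]                               // the singleton Manifest.Int
    assert(im == Manifest.Int)                           // identity-based equality
    assert(im.hashCode == System.identityHashCode(im))   // consistent with eq-equality
    assert(manifest[Int] != manifest[Long])              // different singletons compare unequal
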
diff --git a/src/library/scala/util/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala
index 83451240d5..0629c3a2f8 100644..100755
--- a/src/library/scala/util/NameTransformer.scala
+++ b/src/library/scala/reflect/NameTransformer.scala
@@ -6,10 +6,10 @@
** |/ **
\* */
-// $Id$
+// $Id: NameTransformer.scala 20028 2009-12-07 11:49:19Z cunei $
-package scala.util
+package scala.reflect
/**
* @author Martin Odersky
diff --git a/src/library/scala/reflect/ScalaSignature.java b/src/library/scala/reflect/ScalaSignature.java
new file mode 100644
index 0000000000..d1cdbc0589
--- /dev/null
+++ b/src/library/scala/reflect/ScalaSignature.java
@@ -0,0 +1,13 @@
+package scala.reflect;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/** Carries the pickled Scala signature of the annotated class or object, encoded in the bytes string. */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface ScalaSignature {
+ public String bytes();
+}
diff --git a/src/library/scala/reflect/generic/AnnotationInfos.scala b/src/library/scala/reflect/generic/AnnotationInfos.scala
new file mode 100755
index 0000000000..cc6c909a45
--- /dev/null
+++ b/src/library/scala/reflect/generic/AnnotationInfos.scala
@@ -0,0 +1,50 @@
+package scala.reflect
+package generic
+
+trait AnnotationInfos { self: Universe =>
+
+ type AnnotationInfo <: AnyRef
+ val AnnotationInfo: AnnotationInfoExtractor
+
+ abstract class AnnotationInfoExtractor {
+ def apply(atp: Type, args: List[Tree], assocs: List[(Name, ClassfileAnnotArg)]): AnnotationInfo
+ def unapply(info: AnnotationInfo): Option[(Type, List[Tree], List[(Name, ClassfileAnnotArg)])]
+ }
+
+ type ClassfileAnnotArg <: AnyRef
+ implicit def classfileAnnotArgManifest: ClassManifest[ClassfileAnnotArg] // need a precise manifest to pass to UnPickle's toArray call
+
+ type LiteralAnnotArg <: ClassfileAnnotArg
+ val LiteralAnnotArg: LiteralAnnotArgExtractor
+
+ type ArrayAnnotArg <: ClassfileAnnotArg
+ val ArrayAnnotArg: ArrayAnnotArgExtractor
+
+ type ScalaSigBytes <: ClassfileAnnotArg
+ val ScalaSigBytes: ScalaSigBytesExtractor
+
+ type NestedAnnotArg <: ClassfileAnnotArg
+ val NestedAnnotArg: NestedAnnotArgExtractor
+
+ abstract class LiteralAnnotArgExtractor {
+ def apply(const: Constant): LiteralAnnotArg
+ def unapply(arg: LiteralAnnotArg): Option[Constant]
+ }
+
+ abstract class ArrayAnnotArgExtractor {
+ def apply(const: Array[ClassfileAnnotArg]): ArrayAnnotArg
+ def unapply(arg: ArrayAnnotArg): Option[Array[ClassfileAnnotArg]]
+ }
+
+ abstract class ScalaSigBytesExtractor {
+ def apply(bytes: Array[Byte]): ScalaSigBytes
+ def unapply(arg: ScalaSigBytes): Option[Array[Byte]]
+ }
+
+ abstract class NestedAnnotArgExtractor {
+ def apply(anninfo: AnnotationInfo): NestedAnnotArg
+ def unapply(arg: NestedAnnotArg): Option[AnnotationInfo]
+ }
+}
+
+
diff --git a/src/library/scala/reflect/generic/ByteCodecs.scala b/src/library/scala/reflect/generic/ByteCodecs.scala
new file mode 100644
index 0000000000..fd2e326e19
--- /dev/null
+++ b/src/library/scala/reflect/generic/ByteCodecs.scala
@@ -0,0 +1,209 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.reflect.generic
+
+object ByteCodecs {
+
+ def avoidZero(src: Array[Byte]): Array[Byte] = {
+ var i = 0
+ val srclen = src.length
+ var count = 0
+ while (i < srclen) {
+ if (src(i) == 0x7f) count += 1
+ i += 1
+ }
+ val dst = new Array[Byte](srclen + count)
+ i = 0
+ var j = 0
+ while (i < srclen) {
+ val in = src(i)
+ if (in == 0x7f) {
+ dst(j) = (0xc0).toByte
+ dst(j + 1) = (0x80).toByte
+ j += 2
+ } else {
+ dst(j) = (in + 1).toByte
+ j += 1
+ }
+ i += 1
+ }
+ dst
+ }
+
+ def regenerateZero(src: Array[Byte]): Int = {
+ var i = 0
+ val srclen = src.length
+ var j = 0
+ while (i < srclen) {
+ val in: Int = src(i) & 0xff
+ if (in == 0xc0 && (src(i + 1) & 0xff) == 0x80) {
+ src(j) = 0x7f
+ i += 2
+ } else {
+ src(j) = (in - 1).toByte
+ i += 1
+ }
+ j += 1
+ }
+ j
+ }
+
+ def encode8to7(src: Array[Byte]): Array[Byte] = {
+ val srclen = src.length
+ val dstlen = (srclen * 8 + 6) / 7
+ val dst = new Array[Byte](dstlen)
+ var i = 0
+ var j = 0
+ while (i + 6 < srclen) {
+ var in: Int = src(i) & 0xff
+ dst(j) = (in & 0x7f).toByte
+ var out: Int = in >>> 7
+ in = src(i + 1) & 0xff
+ dst(j + 1) = (out | (in << 1) & 0x7f).toByte
+ out = in >>> 6
+ in = src(i + 2) & 0xff
+ dst(j + 2) = (out | (in << 2) & 0x7f).toByte
+ out = in >>> 5
+ in = src(i + 3) & 0xff
+ dst(j + 3) = (out | (in << 3) & 0x7f).toByte
+ out = in >>> 4
+ in = src(i + 4) & 0xff
+ dst(j + 4) = (out | (in << 4) & 0x7f).toByte
+ out = in >>> 3
+ in = src(i + 5) & 0xff
+ dst(j + 5) = (out | (in << 5) & 0x7f).toByte
+ out = in >>> 2
+ in = src(i + 6) & 0xff
+ dst(j + 6) = (out | (in << 6) & 0x7f).toByte
+ out = in >>> 1
+ dst(j + 7) = out.toByte
+ i += 7
+ j += 8
+ }
+ if (i < srclen) {
+ var in: Int = src(i) & 0xff
+ dst(j) = (in & 0x7f).toByte; j += 1
+ var out: Int = in >>> 7
+ if (i + 1 < srclen) {
+ in = src(i + 1) & 0xff
+ dst(j) = (out | (in << 1) & 0x7f).toByte; j += 1
+ out = in >>> 6
+ if (i + 2 < srclen) {
+ in = src(i + 2) & 0xff
+ dst(j) = (out | (in << 2) & 0x7f).toByte; j += 1
+ out = in >>> 5
+ if (i + 3 < srclen) {
+ in = src(i + 3) & 0xff
+ dst(j) = (out | (in << 3) & 0x7f).toByte; j += 1
+ out = in >>> 4
+ if (i + 4 < srclen) {
+ in = src(i + 4) & 0xff
+ dst(j) = (out | (in << 4) & 0x7f).toByte; j += 1
+ out = in >>> 3
+ if (i + 5 < srclen) {
+ in = src(i + 5) & 0xff
+ dst(j) = (out | (in << 5) & 0x7f).toByte; j += 1
+ out = in >>> 2
+ }
+ }
+ }
+ }
+ }
+ if (j < dstlen) dst(j) = out.toByte
+ }
+ dst
+ }
+
+ @deprecated("use 2-argument version instead")
+ def decode7to8(src: Array[Byte], srclen: Int, dstlen: Int) { decode7to8(src, srclen) }
+
+ def decode7to8(src: Array[Byte], srclen: Int): Int = {
+ var i = 0
+ var j = 0
+ val dstlen = (srclen * 7 + 7) / 8
+ while (i + 7 < srclen) {
+ var out: Int = src(i)
+ var in: Byte = src(i + 1)
+ src(j) = (out | (in & 0x01) << 7).toByte
+ out = in >>> 1
+ in = src(i + 2)
+ src(j + 1) = (out | (in & 0x03) << 6).toByte
+ out = in >>> 2
+ in = src(i + 3)
+ src(j + 2) = (out | (in & 0x07) << 5).toByte
+ out = in >>> 3
+ in = src(i + 4)
+ src(j + 3) = (out | (in & 0x0f) << 4).toByte
+ out = in >>> 4
+ in = src(i + 5)
+ src(j + 4) = (out | (in & 0x1f) << 3).toByte
+ out = in >>> 5
+ in = src(i + 6)
+ src(j + 5) = (out | (in & 0x3f) << 2).toByte
+ out = in >>> 6
+ in = src(i + 7)
+ src(j + 6) = (out | in << 1).toByte
+ i += 8
+ j += 7
+ }
+ if (i < srclen) {
+ var out: Int = src(i)
+ if (i + 1 < srclen) {
+ var in: Byte = src(i + 1)
+ src(j) = (out | (in & 0x01) << 7).toByte; j += 1
+ out = in >>> 1
+ if (i + 2 < srclen) {
+ in = src(i + 2)
+ src(j) = (out | (in & 0x03) << 6).toByte; j += 1
+ out = in >>> 2
+ if (i + 3 < srclen) {
+ in = src(i + 3)
+ src(j) = (out | (in & 0x07) << 5).toByte; j += 1
+ out = in >>> 3
+ if (i + 4 < srclen) {
+ in = src(i + 4)
+ src(j) = (out | (in & 0x0f) << 4).toByte; j += 1
+ out = in >>> 4
+ if (i + 5 < srclen) {
+ in = src(i + 5)
+ src(j) = (out | (in & 0x1f) << 3).toByte; j += 1
+ out = in >>> 5
+ if (i + 6 < srclen) {
+ in = src(i + 6)
+ src(j) = (out | (in & 0x3f) << 2).toByte; j += 1
+ out = in >>> 6
+ }
+ }
+ }
+ }
+ }
+ }
+ if (j < dstlen) src(j) = out.toByte
+ }
+ dstlen
+ }
+
+ def encode(xs: Array[Byte]): Array[Byte] = avoidZero(encode8to7(xs))
+
+ @deprecated("use 1-argument version instead")
+ def decode(xs: Array[Byte], dstlen: Int) { decode(xs) }
+
+ /** Destructively decodes array xs and returns the length of the decoded array. */
+ def decode(xs: Array[Byte]): Int = {
+ val len = regenerateZero(xs)
+ decode7to8(xs, len)
+ }
+}
+
+
+
+
+
+
+
+
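
A round-trip sketch of the codec above (decode works in place and returns the decoded length, which can include one zero padding byte):

    import scala.reflect.generic.ByteCodecs

    val original = Array[Byte](0, 1, 127, -128, 42)
    val encoded  = ByteCodecs.encode(original)       // 7-bit chunks, zero bytes avoided
    val len      = ByteCodecs.decode(encoded)        // destructive, decodes in place
    assert(encoded.take(original.length).sameElements(original))
    assert(len >= original.length)
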
diff --git a/src/compiler/scala/tools/nsc/symtab/Constants.scala b/src/library/scala/reflect/generic/Constants.scala
index 9057c537a7..2fe9d24980 100644..100755
--- a/src/compiler/scala/tools/nsc/symtab/Constants.scala
+++ b/src/library/scala/reflect/generic/Constants.scala
@@ -2,18 +2,15 @@
* Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
-
-package scala.tools.nsc
-package symtab
+// $Id: Constants.scala 20028 2009-12-07 11:49:19Z cunei $
+package scala.reflect
+package generic
import java.lang.Integer.toOctalString
+import PickleFormat._
-import classfile.PickleFormat._
-
-trait Constants {
- self: SymbolTable =>
+trait Constants { self: Universe =>
import definitions._
@@ -33,8 +30,6 @@ trait Constants {
// For supporting java enumerations inside java annotations (see ClassfileParser)
final val EnumTag = LITERALenum - LITERAL
- def isNumeric(tag: Int) = ByteTag <= tag && tag <= DoubleTag
-
case class Constant(value: Any) {
val tag: Int =
@@ -48,11 +43,13 @@ trait Constants {
else if (value.isInstanceOf[Float]) FloatTag
else if (value.isInstanceOf[Double]) DoubleTag
else if (value.isInstanceOf[String]) StringTag
- else if (value.isInstanceOf[Type]) ClassTag
- else if (value.isInstanceOf[Symbol]) EnumTag
+ else if (value.isInstanceOf[AbsType]) ClassTag
+ else if (value.isInstanceOf[AbsSymbol]) EnumTag
else if (value == null) NullTag
else throw new Error("bad constant value: " + value)
+ def isNumeric: Boolean = ByteTag <= tag && tag <= DoubleTag
+
def tpe: Type = tag match {
case UnitTag => UnitClass.tpe
case BooleanTag => BooleanClass.tpe
@@ -65,7 +62,7 @@ trait Constants {
case DoubleTag => DoubleClass.tpe
case StringTag => StringClass.tpe
case NullTag => NullClass.tpe
- case ClassTag => Predef_classOfType(value.asInstanceOf[Type])
+ case ClassTag => ClassType(value.asInstanceOf[Type])
case EnumTag =>
// given (in java): "class A { enum E { VAL1 } }"
// - symbolValue: the symbol of the actual enumeration value (VAL1)
@@ -82,11 +79,11 @@ trait Constants {
override def equals(other: Any): Boolean = other match {
case that: Constant =>
this.tag == that.tag &&
- (this.value == that.value || isNaN(this.value) && isNaN(that.value))
+ (this.value == that.value || this.isNaN && that.isNaN)
case _ => false
}
- def isNaN(value: Any) = value match {
+ def isNaN = value match {
case f: Float => f.isNaN
case d: Double => d.isNaN
case _ => false
@@ -233,7 +230,7 @@ trait Constants {
def symbolValue: Symbol = value.asInstanceOf[Symbol]
- override def hashCode(): Int =
+ override def hashCode: Int =
if (value == null) 0 else value.hashCode() * 41 + 17
}
}
diff --git a/src/library/scala/reflect/generic/Flags.scala b/src/library/scala/reflect/generic/Flags.scala
new file mode 100755
index 0000000000..f0f1f14ade
--- /dev/null
+++ b/src/library/scala/reflect/generic/Flags.scala
@@ -0,0 +1,198 @@
+package scala.reflect
+package generic
+
+object Flags extends Flags
+
+class Flags {
+
+ // modifiers
+ final val IMPLICIT = 0x00000200
+ final val FINAL = 0x00000020
+ final val PRIVATE = 0x00000004
+ final val PROTECTED = 0x00000001
+
+ final val SEALED = 0x00000400
+ final val OVERRIDE = 0x00000002
+ final val CASE = 0x00000800
+ final val ABSTRACT = 0x00000008 // abstract class, or used in conjunction
+ // with abstract override.
+ // Note difference to DEFERRED!
+
+ final val DEFERRED = 0x00000010 // was `abstract' for members | trait is virtual
+ final val METHOD = 0x00000040 // a method
+ final val MODULE = 0x00000100 // symbol is module or class implementing a module
+ final val INTERFACE = 0x00000080 // symbol is an interface (i.e. a trait which defines only abstract methods)
+
+ final val MUTABLE = 0x00001000 // symbol is a mutable variable.
+ final val PARAM = 0x00002000 // symbol is a (value or type) parameter to a method
+ final val PACKAGE = 0x00004000 // symbol is a java package
+ // available: 0x00008000
+
+ final val COVARIANT = 0x00010000 // symbol is a covariant type variable
+ final val CAPTURED = 0x00010000 // variable is accessed from nested function.
+ // Set by LambdaLift
+ final val BYNAMEPARAM = 0x00010000 // parameter is by name
+ final val CONTRAVARIANT = 0x00020000 // symbol is a contravariant type variable
+ final val LABEL = 0x00020000 // method symbol is a label. Set by TailCall
+ final val INCONSTRUCTOR = 0x00020000 // class symbol is defined in this/superclass
+ // constructor.
+ final val ABSOVERRIDE = 0x00040000 // combination of abstract & override
+ final val LOCAL = 0x00080000 // symbol is local to current class (i.e. private[this] or protected[this]
+ // pre: PRIVATE or PROTECTED are also set
+ final val JAVA = 0x00100000 // symbol was defined by a Java class
+ final val SYNTHETIC = 0x00200000 // symbol is compiler-generated
+ final val STABLE = 0x00400000 // functions that are assumed to be stable
+ // (typically, access methods for valdefs)
+ // or classes that do not contain abstract types.
+ final val STATIC = 0x00800000 // static field, method or class
+
+ final val CASEACCESSOR = 0x01000000 // symbol is a case parameter (or its accessor)
+ final val TRAIT = 0x02000000 // symbol is a trait
+ final val DEFAULTPARAM = 0x02000000 // the parameter has a default value
+ final val BRIDGE = 0x04000000 // function is a bridge method. Set by Erasure
+ final val ACCESSOR = 0x08000000 // a value or variable accessor (getter or setter)
+
+ final val SUPERACCESSOR = 0x10000000 // a super accessor
+ final val PARAMACCESSOR = 0x20000000 // for value definitions: is an access method
+ // for a final val parameter
+ // for parameters: is a val parameter
+ final val MODULEVAR = 0x40000000 // for variables: is the variable caching a module value
+ final val SYNTHETICMETH = 0x40000000 // for methods: synthetic method, but without SYNTHETIC flag
+ final val MONOMORPHIC = 0x40000000 // for type symbols: does not have type parameters
+ final val LAZY = 0x80000000L // symbol is a lazy val. can't have MUTABLE unless transformed by typer
+
+ final val IS_ERROR = 0x100000000L // symbol is an error symbol
+ final val OVERLOADED = 0x200000000L // symbol is overloaded
+ final val LIFTED = 0x400000000L // class has been lifted out to package level
+ // local value has been lifted out to class level
+ // todo: make LIFTED = latePRIVATE?
+ final val MIXEDIN = 0x800000000L // term member has been mixed in
+ final val EXISTENTIAL = 0x800000000L // type is an existential parameter or skolem
+
+ final val EXPANDEDNAME = 0x1000000000L // name has been expanded with class suffix
+ final val IMPLCLASS = 0x2000000000L // symbol is an implementation class
+ final val PRESUPER = 0x2000000000L // value is evaluated before super call
+ final val TRANS_FLAG = 0x4000000000L // transient flag guaranteed to be reset
+ // after each phase.
+
+ final val LOCKED = 0x8000000000L // temporary flag to catch cyclic dependencies
+ final val SPECIALIZED = 0x10000000000L// symbol is a generated specialized member
+ final val DEFAULTINIT = 0x20000000000L// symbol is initialized to its default value
+ final val VBRIDGE = 0x40000000000L// symbol is a varargs bridge
+
+ // pickling and unpickling of flags
+
+ // The flags from 0x001 to 0x800 are different in the raw flags
+ // and in the pickled format.
+
+ private final val IMPLICIT_PKL = 0x00000001
+ private final val FINAL_PKL = 0x00000002
+ private final val PRIVATE_PKL = 0x00000004
+ private final val PROTECTED_PKL = 0x00000008
+
+ private final val SEALED_PKL = 0x00000010
+ private final val OVERRIDE_PKL = 0x00000020
+ private final val CASE_PKL = 0x00000040
+ private final val ABSTRACT_PKL = 0x00000080
+
+ private final val DEFERRED_PKL = 0x00000100
+ private final val METHOD_PKL = 0x00000200
+ private final val MODULE_PKL = 0x00000400
+ private final val INTERFACE_PKL = 0x00000800
+
+ private final val PKL_MASK = 0x00000FFF
+
+ final val PickledFlags: Long = 0xFFFFFFFFL
+
+ private val r2p = {
+ def rawFlagsToPickledAux(flags:Int) = {
+ var pflags=0
+ if ((flags & IMPLICIT )!=0) pflags|=IMPLICIT_PKL
+ if ((flags & FINAL )!=0) pflags|=FINAL_PKL
+ if ((flags & PRIVATE )!=0) pflags|=PRIVATE_PKL
+ if ((flags & PROTECTED)!=0) pflags|=PROTECTED_PKL
+ if ((flags & SEALED )!=0) pflags|=SEALED_PKL
+ if ((flags & OVERRIDE )!=0) pflags|=OVERRIDE_PKL
+ if ((flags & CASE )!=0) pflags|=CASE_PKL
+ if ((flags & ABSTRACT )!=0) pflags|=ABSTRACT_PKL
+ if ((flags & DEFERRED )!=0) pflags|=DEFERRED_PKL
+ if ((flags & METHOD )!=0) pflags|=METHOD_PKL
+ if ((flags & MODULE )!=0) pflags|=MODULE_PKL
+ if ((flags & INTERFACE)!=0) pflags|=INTERFACE_PKL
+ pflags
+ }
+ val v=new Array[Int](PKL_MASK+1)
+ var i=0
+ while (i<=PKL_MASK) {
+ v(i)=rawFlagsToPickledAux(i)
+ i+=1
+ }
+ v
+ }
+
+ private val p2r = {
+ def pickledToRawFlagsAux(pflags:Int) = {
+ var flags=0
+ if ((pflags & IMPLICIT_PKL )!=0) flags|=IMPLICIT
+ if ((pflags & FINAL_PKL )!=0) flags|=FINAL
+ if ((pflags & PRIVATE_PKL )!=0) flags|=PRIVATE
+ if ((pflags & PROTECTED_PKL)!=0) flags|=PROTECTED
+ if ((pflags & SEALED_PKL )!=0) flags|=SEALED
+ if ((pflags & OVERRIDE_PKL )!=0) flags|=OVERRIDE
+ if ((pflags & CASE_PKL )!=0) flags|=CASE
+ if ((pflags & ABSTRACT_PKL )!=0) flags|=ABSTRACT
+ if ((pflags & DEFERRED_PKL )!=0) flags|=DEFERRED
+ if ((pflags & METHOD_PKL )!=0) flags|=METHOD
+ if ((pflags & MODULE_PKL )!=0) flags|=MODULE
+ if ((pflags & INTERFACE_PKL)!=0) flags|=INTERFACE
+ flags
+ }
+ val v=new Array[Int](PKL_MASK+1)
+ var i=0
+ while (i<=PKL_MASK) {
+ v(i)=pickledToRawFlagsAux(i)
+ i+=1
+ }
+ v
+ }
+
+ def rawFlagsToPickled(flags:Long):Long =
+ (flags & ~PKL_MASK) | r2p(flags.toInt & PKL_MASK)
+
+ def pickledToRawFlags(pflags:Long):Long =
+ (pflags & ~PKL_MASK) | p2r(pflags.toInt & PKL_MASK)
+
+ // List of the raw flags, in pickled order
+ protected val pickledListOrder = {
+ def findBit(m:Long):Int = {
+ var mask=m
+ var i=0
+ while (i <= 62) {
+ if ((mask&1) == 1L) return i
+ mask >>= 1
+ i += 1
+ }
+ throw new AssertionError()
+ }
+ val v=new Array[Long](63)
+ v(findBit(IMPLICIT_PKL ))=IMPLICIT
+ v(findBit(FINAL_PKL ))=FINAL
+ v(findBit(PRIVATE_PKL ))=PRIVATE
+ v(findBit(PROTECTED_PKL))=PROTECTED
+ v(findBit(SEALED_PKL ))=SEALED
+ v(findBit(OVERRIDE_PKL ))=OVERRIDE
+ v(findBit(CASE_PKL ))=CASE
+ v(findBit(ABSTRACT_PKL ))=ABSTRACT
+ v(findBit(DEFERRED_PKL ))=DEFERRED
+ v(findBit(METHOD_PKL ))=METHOD
+ v(findBit(MODULE_PKL ))=MODULE
+ v(findBit(INTERFACE_PKL))=INTERFACE
+ var i=findBit(PKL_MASK+1)
+ while (i <= 62) {
+ v(i)=1L << i
+ i += 1
+ }
+ v.toList
+ }
+
+}
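
The pickled and raw layouts differ only in the low 12 bits, and the tables above make the conversion a bijection there; a sketch:

    import scala.reflect.generic.Flags._

    val raw     = (FINAL | CASE | METHOD).toLong
    val pickled = rawFlagsToPickled(raw)
    assert(pickled != raw)                                      // low bits are remapped
    assert(pickledToRawFlags(pickled) == raw)                   // and the mapping round-trips
    assert(rawFlagsToPickled(raw | LAZY) == (pickled | LAZY))   // higher bits pass through unchanged
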
diff --git a/src/library/scala/reflect/generic/Names.scala b/src/library/scala/reflect/generic/Names.scala
new file mode 100755
index 0000000000..1b31726e3a
--- /dev/null
+++ b/src/library/scala/reflect/generic/Names.scala
@@ -0,0 +1,21 @@
+package scala.reflect
+package generic
+
+trait Names {
+
+ type Name >: Null <: AnyRef
+
+ def newTermName(cs: Array[Char], offset: Int, len: Int): Name
+ def newTermName(cs: Array[Byte], offset: Int, len: Int): Name
+ def newTermName(s: String): Name
+
+ def mkTermName(name: Name): Name
+
+ def newTypeName(cs: Array[Char], offset: Int, len: Int): Name
+ def newTypeName(cs: Array[Byte], offset: Int, len: Int): Name
+ def newTypeName(s: String): Name
+
+ def mkTypeName(name: Name): Name
+}
+
+
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/PickleBuffer.scala b/src/library/scala/reflect/generic/PickleBuffer.scala
index 3b54e00057..2fab02bcfe 100644..100755
--- a/src/compiler/scala/tools/nsc/symtab/classfile/PickleBuffer.scala
+++ b/src/library/scala/reflect/generic/PickleBuffer.scala
@@ -2,11 +2,10 @@
* Copyright 2005-2010 LAMP/EPFL
* @author Martin Odersky
*/
-// $Id$
+// $Id: PickleBuffer.scala 20028 2009-12-07 11:49:19Z cunei $
-package scala.tools.nsc
-package symtab
-package classfile
+package scala.reflect
+package generic
/** Variable length byte arrays, with methods for basic pickling and unpickling.
*
@@ -129,6 +128,29 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
x << leading >> leading
}
+ /** Returns the buffer as a sequence of (Int, Array[Byte]) representing
+ * (tag, data) of the individual entries. Saves and restores buffer state.
+ */
+
+ def toIndexedSeq: IndexedSeq[(Int, Array[Byte])] = {
+ val saved = readIndex
+ readIndex = 0
+ readNat() ; readNat() // discarding version
+ val result = new Array[(Int, Array[Byte])](readNat())
+
+ result.indices foreach { index =>
+ val tag = readNat()
+ val len = readNat()
+ val bytes = data.slice(readIndex, len + readIndex)
+ readIndex += len
+
+ result(index) = tag -> bytes
+ }
+
+ readIndex = saved
+ result.toIndexedSeq
+ }
+
/** Perform operation <code>op</code> until the condition
* <code>readIndex == end</code> is satisfied.
* Concatenate results into a list.
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/PickleFormat.scala b/src/library/scala/reflect/generic/PickleFormat.scala
index 56b9916f01..d1e884f513 100644..100755
--- a/src/compiler/scala/tools/nsc/symtab/classfile/PickleFormat.scala
+++ b/src/library/scala/reflect/generic/PickleFormat.scala
@@ -1,12 +1,5 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2010 LAMP/EPFL
- * @author Martin Odersky
- */
-// $Id$
-
-package scala.tools.nsc
-package symtab
-package classfile
+package scala.reflect
+package generic
/** This object provides constants for pickling attributes.
*
@@ -28,21 +21,21 @@ object PickleFormat {
* | 5 ALIASsym len_Nat SymbolInfo
* | 6 CLASSsym len_Nat SymbolInfo [thistype_Ref]
* | 7 MODULEsym len_Nat SymbolInfo
- * | 8 VALsym len_Nat [defaultGetter_Ref] SymbolInfo [alias_Ref]
+ * | 8 VALsym len_Nat [defaultGetter_Ref /* no longer needed*/] SymbolInfo [alias_Ref]
* | 9 EXTref len_Nat name_Ref [owner_Ref]
* | 10 EXTMODCLASSref len_Nat name_Ref [owner_Ref]
* | 11 NOtpe len_Nat
* | 12 NOPREFIXtpe len_Nat
* | 13 THIStpe len_Nat sym_Ref
* | 14 SINGLEtpe len_Nat type_Ref sym_Ref
- * | 15 CONSTANTtpe len_Nat type_Ref constant_Ref
+ * | 15 CONSTANTtpe len_Nat constant_Ref
* | 16 TYPEREFtpe len_Nat type_Ref sym_Ref {targ_Ref}
* | 17 TYPEBOUNDStpe len_Nat tpe_Ref tpe_Ref
* | 18 REFINEDtpe len_Nat classsym_Ref {tpe_Ref}
* | 19 CLASSINFOtpe len_Nat classsym_Ref {tpe_Ref}
* | 20 METHODtpe len_Nat tpe_Ref {sym_Ref}
* | 21 POLYTtpe len_Nat tpe_Ref {sym_Ref}
- * | 22 IMPLICITMETHODtpe len_Nat tpe_Ref {sym_Ref}
+ * | 22 IMPLICITMETHODtpe len_Nat tpe_Ref {sym_Ref} /* no longer needed */
* | 52 SUPERtpe len_Nat tpe_Ref tpe_Ref
* | 24 LITERALunit len_Nat
* | 25 LITERALboolean len_Nat value_Long
@@ -59,7 +52,7 @@ object PickleFormat {
* | 36 LITERALenum len_Nat sym_Ref
* | 40 SYMANNOT len_Nat sym_Ref AnnotInfoBody
* | 41 CHILDREN len_Nat sym_Ref {sym_Ref}
- * | 42 ANNOTATEDtpe len_Nat [sym_Ref] tpe_Ref {annotinfo_Ref}
+ * | 42 ANNOTATEDtpe len_Nat [sym_Ref /* no longer needed */] tpe_Ref {annotinfo_Ref}
* | 43 ANNOTINFO len_Nat AnnotInfoBody
* | 44 ANNOTARGARRAY len_Nat {constAnnotArg_Ref}
* | 47 DEBRUIJNINDEXtpe len_Nat level_Nat index_Nat
diff --git a/src/library/scala/reflect/generic/Scopes.scala b/src/library/scala/reflect/generic/Scopes.scala
new file mode 100755
index 0000000000..9f8a8ecd19
--- /dev/null
+++ b/src/library/scala/reflect/generic/Scopes.scala
@@ -0,0 +1,15 @@
+package scala.reflect
+package generic
+
+trait Scopes { self: Universe =>
+
+ abstract class AbsScope extends Iterable[Symbol] {
+ def enter(sym: Symbol): Symbol
+ }
+
+ type Scope <: AbsScope
+
+ def newScope(): Scope
+}
+
+
diff --git a/src/library/scala/reflect/generic/StandardDefinitions.scala b/src/library/scala/reflect/generic/StandardDefinitions.scala
new file mode 100755
index 0000000000..24dce7173a
--- /dev/null
+++ b/src/library/scala/reflect/generic/StandardDefinitions.scala
@@ -0,0 +1,67 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Martin Odersky
+ */
+// $Id: Definitions.scala 20619 2010-01-20 10:55:56Z rytz $
+
+package scala.reflect
+package generic
+
+trait StandardDefinitions { self: Universe =>
+
+ val definitions: AbsDefinitions
+
+ abstract class AbsDefinitions {
+
+ // outer packages and their classes
+ def RootPackage: Symbol
+ def RootClass: Symbol
+ def EmptyPackage: Symbol
+ def EmptyPackageClass: Symbol
+
+ def ScalaPackage: Symbol
+ def ScalaPackageClass: Symbol
+
+ // top types
+ def AnyClass : Symbol
+ def AnyValClass: Symbol
+ def AnyRefClass: Symbol
+ def ObjectClass: Symbol
+
+ // bottom types
+ def NullClass : Symbol
+ def NothingClass: Symbol
+
+ // the scala value classes
+ def UnitClass : Symbol
+ def ByteClass : Symbol
+ def ShortClass : Symbol
+ def CharClass : Symbol
+ def IntClass : Symbol
+ def LongClass : Symbol
+ def FloatClass : Symbol
+ def DoubleClass : Symbol
+ def BooleanClass: Symbol
+
+ // fundamental reference classes
+ def SymbolClass : Symbol
+ def StringClass : Symbol
+ def ClassClass : Symbol
+
+ // fundamental modules
+ def PredefModule: Symbol
+
+ // fundamental type constructions
+ def ClassType(arg: Type): Type
+
+ /** The string representation used by the given type in the VM.
+ */
+ def signature(tp: Type): String
+
+ /** Is symbol one of the value classes? */
+ def isValueClass(sym: Symbol): Boolean
+
+ /** Is symbol one of the numeric value classes? */
+ def isNumericValueClass(sym: Symbol): Boolean
+ }
+}
diff --git a/src/library/scala/reflect/generic/StdNames.scala b/src/library/scala/reflect/generic/StdNames.scala
new file mode 100755
index 0000000000..7a3b9169d8
--- /dev/null
+++ b/src/library/scala/reflect/generic/StdNames.scala
@@ -0,0 +1,26 @@
+package scala.reflect
+package generic
+
+trait StdNames { self: Universe =>
+
+ val nme: StandardNames
+
+ class StandardNames {
+ val EXPAND_SEPARATOR_STRING = "$$"
+ val LOCAL_SUFFIX_STRING = " "
+
+ val ANON_CLASS_NAME = newTermName("$anon")
+ val ANON_FUN_NAME = newTermName("$anonfun")
+ val EMPTY_PACKAGE_NAME = newTermName("<empty>")
+ val IMPORT = newTermName("<import>")
+ val REFINE_CLASS_NAME = newTermName("<refinement>")
+ val ROOT = newTermName("<root>")
+ val ROOTPKG = newTermName("_root_")
+ val EMPTY = newTermName("")
+
+ /** The expanded name of `name' relative to the base symbol `base', with the given `separator'.
+ */
+ def expandedName(name: Name, base: Symbol, separator: String = EXPAND_SEPARATOR_STRING): Name =
+ newTermName(base.fullName('$') + separator + name)
+ }
+}
diff --git a/src/library/scala/reflect/generic/Symbols.scala b/src/library/scala/reflect/generic/Symbols.scala
new file mode 100755
index 0000000000..2f5e0624ab
--- /dev/null
+++ b/src/library/scala/reflect/generic/Symbols.scala
@@ -0,0 +1,194 @@
+package scala.reflect
+package generic
+
+import Flags._
+
+trait Symbols { self: Universe =>
+
+ type Symbol >: Null <: AbsSymbol
+
+ abstract class AbsSymbol { this: Symbol =>
+
+ /** The owner of this symbol.
+ */
+ def owner: Symbol
+
+ /** The flags of this symbol */
+ def flags: Long
+
+ /** The name of the symbol as a member of the `Name` type.
+ */
+ def name: Name
+
+ /** The name of the symbol before decoding, e.g. `$eq$eq` instead of `==`.
+ */
+ def encodedName: String
+
+ /** The decoded name of the symbol, e.g. `==` instead of `$eq$eq`.
+ */
+ def decodedName: String = stripLocalSuffix(NameTransformer.decode(encodedName))
+
+ /** The encoded full path name of this symbol, where outer names and inner names
+ * are separated by `separator` characters.
+ * Never translates expansions of operators back to operator symbol.
+ * Never adds id.
+ */
+ final def fullName(separator: Char): String = stripLocalSuffix {
+ if (isRoot || isRootPackage || this == NoSymbol) this.toString
+ else if (owner.isEffectiveRoot) encodedName
+ else owner.enclClass.fullName(separator) + separator + encodedName
+ }
+
+ private def stripLocalSuffix(s: String) = s stripSuffix nme.LOCAL_SUFFIX_STRING
+
+ /** The encoded full path name of this symbol, where outer names and inner names
+ * are separated by periods.
+ */
+ final def fullName: String = fullName('.')
+
+ /** Does symbol have ANY flag in `mask` set? */
+ final def hasFlag(mask: Long): Boolean = (flags & mask) != 0L
+
+ /** Does symbol have ALL the flags in `mask` set? */
+ final def hasAllFlags(mask: Long): Boolean = (flags & mask) == mask
+
+ /** Set when symbol has a modifier of the form private[X], NoSymbol otherwise.
+ */
+ def privateWithin: Symbol
+
+ /** The raw info of the type
+ */
+ def rawInfo: Type
+
+ /** The type of the symbol
+ */
+ def tpe: Type = info
+
+ /** The info of the symbol. This is like tpe, except for class symbols where the `info`
+ * describes the contents of the class whereas the `tpe` is a reference to the class.
+ */
+ def info: Type = {
+ val tp = rawInfo
+ tp.complete(this)
+ tp
+ }
+
+ /** If this symbol is a class or trait, its self type, otherwise the type of the symbol itself.
+ */
+ def typeOfThis: Type
+
+ def owner_=(sym: Symbol) { throw new UnsupportedOperationException("owner_= inapplicable for " + this) }
+ def flags_=(flags: Long) { throw new UnsupportedOperationException("flags_= inapplicable for " + this) }
+ def info_=(tp: Type) { throw new UnsupportedOperationException("info_= inapplicable for " + this) }
+ def typeOfThis_=(tp: Type) { throw new UnsupportedOperationException("typeOfThis_= inapplicable for " + this) }
+ def privateWithin_=(sym: Symbol) { throw new UnsupportedOperationException("privateWithin_= inapplicable for " + this) }
+ def sourceModule_=(sym: Symbol) { throw new UnsupportedOperationException("sourceModule_= inapplicable for " + this) }
+ def addChild(sym: Symbol) { throw new UnsupportedOperationException("addChild inapplicable for " + this) }
+ def addAnnotation(annot: AnnotationInfo) { throw new UnsupportedOperationException("addAnnotation inapplicable for " + this) }
+
+ /** For a module class, its linked class; for a plain class,
+ * the module class of its linked module.
+ * For instance
+ * object Foo
+ * class Foo
+ *
+ * Then object Foo has a `moduleClass' (invisible to the user; the backend calls it Foo$).
+ * linkedClassOfClass goes from class Foo$ to class Foo, and back.
+ */
+ def linkedClassOfClass: Symbol
+
+ /** The module corresponding to this module class (note that this
+ * is not updated when a module is cloned), or NoSymbol if this is not a ModuleClass
+ */
+ def sourceModule: Symbol = NoSymbol
+
+ /** If this symbol is an object definition, its implied associated class,
+ * otherwise NoSymbol
+ */
+ def moduleClass: Symbol
+
+// flags and kind tests
+
+ def isTerm = false // to be overridden
+ def isType = false // to be overridden
+ def isClass = false // to be overridden
+ def isAliasType = false // to be overridden
+ def isAbstractType = false // to be overridden
+ private[scala] def isSkolem = false // to be overridden
+
+ def isTrait: Boolean = isClass && hasFlag(TRAIT) // refined later for virtual classes.
+ final def hasDefault = isParameter && hasFlag(DEFAULTPARAM)
+ final def isAbstractClass = isClass && hasFlag(ABSTRACT)
+ final def isAbstractOverride = isTerm && hasFlag(ABSTRACT) && hasFlag(OVERRIDE)
+ final def isBridge = hasFlag(BRIDGE)
+ final def isCase = hasFlag(CASE)
+ final def isCaseAccessor = hasFlag(CASEACCESSOR)
+ final def isContravariant = isType && hasFlag(CONTRAVARIANT)
+ final def isCovariant = isType && hasFlag(COVARIANT)
+ final def isDeferred = hasFlag(DEFERRED) && !isClass
+ final def isEarlyInitialized: Boolean = isTerm && hasFlag(PRESUPER)
+ final def isExistentiallyBound = isType && hasFlag(EXISTENTIAL)
+ final def isFinal = hasFlag(FINAL)
+ final def isGetterOrSetter = hasFlag(ACCESSOR)
+ final def isImplClass = isClass && hasFlag(IMPLCLASS) // Is this symbol an implementation class for a mixin?
+ final def isImplicit = hasFlag(IMPLICIT)
+ final def isInterface = hasFlag(INTERFACE)
+ final def isJavaDefined = hasFlag(JAVA)
+ final def isLazy = hasFlag(LAZY)
+ final def isMethod = isTerm && hasFlag(METHOD)
+ final def isModule = isTerm && hasFlag(MODULE)
+ final def isModuleClass = isClass && hasFlag(MODULE)
+ final def isMutable = hasFlag(MUTABLE)
+ final def isOverloaded = hasFlag(OVERLOADED)
+ final def isOverride = hasFlag(OVERRIDE)
+ final def isParamAccessor = hasFlag(PARAMACCESSOR)
+ final def isParameter = hasFlag(PARAM)
+ final def isRefinementClass = isClass && name == mkTypeName(nme.REFINE_CLASS_NAME)
+ final def isSealed = isClass && (hasFlag(SEALED) || definitions.isValueClass(this))
+ final def isSourceMethod = isTerm && (flags & (METHOD | STABLE)) == METHOD // exclude all accessors!!!
+ final def isSuperAccessor = hasFlag(SUPERACCESSOR)
+ final def isSynthetic = hasFlag(SYNTHETIC)
+ final def isTypeParameter = isType && isParameter && !isSkolem
+
+ /** Access tests */
+ final def isPrivate = hasFlag(PRIVATE)
+ final def isPrivateLocal = hasFlag(PRIVATE) && hasFlag(LOCAL)
+ final def isProtected = hasFlag(PROTECTED)
+ final def isProtectedLocal = hasFlag(PROTECTED) && hasFlag(LOCAL)
+ final def isPublic = !hasFlag(PRIVATE | PROTECTED) && privateWithin == NoSymbol
+
+ /** Package tests */
+ final def isEmptyPackage = isPackage && name == nme.EMPTY_PACKAGE_NAME
+ final def isEmptyPackageClass = isPackageClass && name == mkTypeName(nme.EMPTY_PACKAGE_NAME)
+ final def isPackage = isModule && hasFlag(PACKAGE)
+ final def isPackageClass = isClass && hasFlag(PACKAGE)
+ final def isRoot = isPackageClass && owner == NoSymbol
+ final def isRootPackage = isPackage && owner == NoSymbol
+
+ /** Is this symbol an effective root for the fullName string?
+ */
+ def isEffectiveRoot = isRoot || isEmptyPackageClass
+
+ // creators
+
+ def newAbstractType(name: Name, pos: Position = NoPosition): Symbol
+ def newAliasType(name: Name, pos: Position = NoPosition): Symbol
+ def newClass(name: Name, pos: Position = NoPosition): Symbol
+ def newMethod(name: Name, pos: Position = NoPosition): Symbol
+ def newModule(name: Name, clazz: Symbol, pos: Position = NoPosition): Symbol
+ def newModuleClass(name: Name, pos: Position = NoPosition): Symbol
+ def newValue(name: Name, pos: Position = NoPosition): Symbol
+
+ // access to related symbols
+
+ /** The next enclosing class */
+ def enclClass: Symbol = if (isClass) this else owner.enclClass
+
+ /** The next enclosing method */
+ def enclMethod: Symbol = if (isSourceMethod) this else owner.enclMethod
+ }
+
+ val NoSymbol: Symbol
+}
+
+
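
The two flag tests above differ in an easy-to-miss way: hasFlag is satisfied by any bit of the mask, hasAllFlags only by all of them. Illustrated on plain masks, mirroring the definitions (the local helpers are not the Symbol API itself):

    import scala.reflect.generic.Flags._

    val flags = FINAL.toLong | OVERRIDE              // e.g. a `final override def`
    def hasFlag(mask: Long)     = (flags & mask) != 0L
    def hasAllFlags(mask: Long) = (flags & mask) == mask

    assert(hasFlag(FINAL | SEALED))                  // one matching bit suffices
    assert(!hasAllFlags(FINAL | SEALED))             // SEALED is missing, so this fails
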
diff --git a/src/library/scala/reflect/generic/Trees.scala b/src/library/scala/reflect/generic/Trees.scala
new file mode 100755
index 0000000000..df93d157e3
--- /dev/null
+++ b/src/library/scala/reflect/generic/Trees.scala
@@ -0,0 +1,738 @@
+package scala.reflect
+package generic
+
+import java.io.{PrintWriter, StringWriter}
+import Flags._
+
+trait Trees { self: Universe =>
+
+ abstract class AbsTreePrinter(out: PrintWriter) {
+ def print(tree: Tree)
+ def flush()
+ }
+
+ def newTreePrinter(out: PrintWriter): AbsTreePrinter
+
+ private[scala] var nodeCount = 0
+
+ /** @param privateWithin the qualifier for a private (a type name)
+ * or nme.EMPTY.toTypeName, if none is given.
+ * @param annotations the annotations for the definition.
+ * <strong>Note:</strong> the typechecker drops these annotations,
+ * use the AnnotationInfo's (Symbol.annotations) in later phases.
+ */
+ case class Modifiers(flags: Long, privateWithin: Name, annotations: List[Tree], positions: Map[Long, Position]) {
+ def isAbstract = hasFlag(ABSTRACT )
+ def isAccessor = hasFlag(ACCESSOR )
+ def isArgument = hasFlag(PARAM )
+ def isCase = hasFlag(CASE )
+ def isContravariant = hasFlag(CONTRAVARIANT) // marked with `-'
+ def isCovariant = hasFlag(COVARIANT ) // marked with `+'
+ def isDeferred = hasFlag(DEFERRED )
+ def isFinal = hasFlag(FINAL )
+ def isImplicit = hasFlag(IMPLICIT )
+ def isLazy = hasFlag(LAZY )
+ def isOverride = hasFlag(OVERRIDE )
+ def isPrivate = hasFlag(PRIVATE )
+ def isProtected = hasFlag(PROTECTED)
+ def isPublic = !isPrivate && !isProtected
+ def isSealed = hasFlag(SEALED )
+ def isTrait = hasFlag(TRAIT )
+ def isVariable = hasFlag(MUTABLE )
+
+ def hasFlag(flag: Long) = (flag & flags) != 0L
+ def & (flag: Long): Modifiers = {
+ val flags1 = flags & flag
+ if (flags1 == flags) this
+ else Modifiers(flags1, privateWithin, annotations, positions)
+ }
+ def &~ (flag: Long): Modifiers = {
+ val flags1 = flags & (~flag)
+ if (flags1 == flags) this
+ else Modifiers(flags1, privateWithin, annotations, positions)
+ }
+ def | (flag: Long): Modifiers = {
+ val flags1 = flags | flag
+ if (flags1 == flags) this
+ else Modifiers(flags1, privateWithin, annotations, positions)
+ }
+ def withAnnotations(annots: List[Tree]) =
+ if (annots.isEmpty) this
+ else copy(annotations = annotations ::: annots)
+ def withPosition(flag: Long, position: Position) =
+ copy(positions = positions + (flag -> position))
+ }
+
+ def Modifiers(flags: Long, privateWithin: Name): Modifiers = Modifiers(flags, privateWithin, List(), Map.empty)
+ def Modifiers(flags: Long): Modifiers = Modifiers(flags, mkTypeName(nme.EMPTY))
+
+ lazy val NoMods = Modifiers(0)
+
+ abstract class Tree extends Product {
+ val id = nodeCount
+ nodeCount += 1
+
+ private[this] var rawpos: Position = NoPosition
+
+ def pos = rawpos
+ def pos_=(pos: Position) = rawpos = pos
+ def setPos(pos: Position): this.type = { rawpos = pos; this }
+
+ private[this] var rawtpe: Type = _
+
+ def tpe = rawtpe
+ def tpe_=(t: Type) = rawtpe = t
+
+ /** Set tpe to the given `tp` and return this.
+ */
+ def setType(tp: Type): this.type = { rawtpe = tp; this }
+
+ /** Like `setType`, but if this is a previously empty TypeTree
+ * that fact is remembered so that resetType will snap back.
+ */
+ def defineType(tp: Type): this.type = setType(tp)
+
+ def symbol: Symbol = null
+ def symbol_=(sym: Symbol) { throw new UnsupportedOperationException("symbol_= inapplicable for " + this) }
+ def setSymbol(sym: Symbol): this.type = { symbol = sym; this }
+
+ def hasSymbol = false
+ def isDef = false
+ def isEmpty = false
+
+ /** The direct child trees of this tree
+ * EmptyTrees are always omitted. Lists are collapsed.
+ */
+ def children: List[Tree] = {
+ def subtrees(x: Any): List[Tree] = x match {
+ case EmptyTree => List()
+ case t: Tree => List(t)
+ case xs: List[_] => xs flatMap subtrees
+ case _ => List()
+ }
+ productIterator.toList flatMap subtrees
+ }
+
+ /** In compiler: Make a copy of this tree, keeping all attributes,
+ * except that all positions are focussed (so nothing
+ * in this tree will be found when searching by position).
+ * If not in the compiler, it may also return the tree unchanged.
+ */
+ private[scala] def duplicate: this.type =
+ duplicateTree(this).asInstanceOf[this.type]
+
+ private[scala] def copyAttrs(tree: Tree): this.type = {
+ pos = tree.pos
+ tpe = tree.tpe
+ if (hasSymbol) symbol = tree.symbol
+ this
+ }
+
+ override def toString(): String = {
+ val buffer = new StringWriter()
+ val printer = newTreePrinter(new PrintWriter(buffer))
+ printer.print(this)
+ printer.flush()
+ buffer.toString
+ }
+
+ override def hashCode(): Int = super.hashCode()
+
+ override def equals(that: Any): Boolean = that match {
+ case t: Tree => this eq t
+ case _ => false
+ }
+ }
+
+ private[scala] def duplicateTree(tree: Tree): Tree = tree
+
+ trait SymTree extends Tree {
+ override def hasSymbol = true
+ override var symbol: Symbol = NoSymbol
+ }
+
+ trait RefTree extends SymTree {
+ def name: Name
+ }
+
+ abstract class DefTree extends SymTree {
+ def name: Name
+ override def isDef = true
+ }
+
+ trait TermTree extends Tree
+
+ /** A tree for a type. Note that not all type trees implement
+ * this trait; in particular, Ident's are an exception. */
+ trait TypTree extends Tree
+
+// ----- tree node alternatives --------------------------------------
+
+ /** The empty tree */
+ case object EmptyTree extends TermTree {
+ super.tpe_=(NoType)
+ override def tpe_=(t: Type) =
+ if (t != NoType) throw new UnsupportedOperationException("tpe_=("+t+") inapplicable for <empty>")
+ override def isEmpty = true
+ }
+
+ abstract class MemberDef extends DefTree {
+ def mods: Modifiers
+ def keyword: String = this match {
+ case TypeDef(_, _, _, _) => "type"
+ case ClassDef(mods, _, _, _) => if (mods.isTrait) "trait" else "class"
+ case DefDef(_, _, _, _, _, _) => "def"
+ case ModuleDef(_, _, _) => "object"
+ case PackageDef(_, _) => "package"
+ case ValDef(mods, _, _, _) => if (mods.isVariable) "var" else "val"
+ case _ => ""
+ }
+ final def hasFlag(mask: Long): Boolean = (mods.flags & mask) != 0L
+ }
+
+ /** Package clause
+ */
+ case class PackageDef(pid: RefTree, stats: List[Tree])
+ extends MemberDef {
+ def name = pid.name
+ def mods = NoMods
+ }
+
+ abstract class ImplDef extends MemberDef {
+ def impl: Template
+ }
+
+ /** Class definition */
+ case class ClassDef(mods: Modifiers, name: Name, tparams: List[TypeDef], impl: Template)
+ extends ImplDef
+
+ /** Singleton object definition
+ */
+ case class ModuleDef(mods: Modifiers, name: Name, impl: Template)
+ extends ImplDef
+
+ abstract class ValOrDefDef extends MemberDef {
+ def tpt: Tree
+ def rhs: Tree
+ }
+
+ /** Value definition
+ */
+ case class ValDef(mods: Modifiers, name: Name, tpt: Tree, rhs: Tree) extends ValOrDefDef
+
+ /** Method definition
+ */
+ case class DefDef(mods: Modifiers, name: Name, tparams: List[TypeDef],
+ vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree) extends ValOrDefDef
+
+ /** Abstract type, type parameter, or type alias */
+ case class TypeDef(mods: Modifiers, name: Name, tparams: List[TypeDef], rhs: Tree)
+ extends MemberDef
+
+ /** <p>
+ * Labelled expression - the symbols in the array (must be Idents!)
+ * are those the label takes as argument
+ * </p>
+ * <p>
+ * The symbol that is given to the labeldef should have a MethodType
+ * (as if it were a nested function)
+ * </p>
+ * <p>
+ * Jumps are apply nodes attributed with label symbol, the arguments
+ * will get assigned to the idents.
+ * </p>
+ * <p>
+ * Note: on 2005-06-09 Martin, Iuli, Burak agreed to have forward
+ * jumps within a Block.
+ * </p>
+ */
+ case class LabelDef(name: Name, params: List[Ident], rhs: Tree)
+ extends DefTree with TermTree
+
+
+ /** Import selector
+ *
+ * Representation of an imported name, its optional rename, and their optional positions.
+ *
+ * @param name the imported name
+ * @param namePos its position or -1 if undefined
+ * @param rename the name the import is renamed to (== name if no renaming)
+ * @param renamePos the position of the rename or -1 if undefined
+ */
+ case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int)
+
+ /** Import clause
+ *
+ * @param expr
+ * @param selectors
+ */
+ case class Import(expr: Tree, selectors: List[ImportSelector])
+ extends SymTree
+ // The symbol of an Import is an import symbol @see Symbol.newImport
+ // It's used primarily as a marker to check that the import has been typechecked.
+
+ /** Instantiation template of a class or trait
+ *
+ * @param parents
+ * @param body
+ */
+ case class Template(parents: List[Tree], self: ValDef, body: List[Tree])
+ extends SymTree {
+ // the symbol of a template is a local dummy. @see Symbol.newLocalDummy
+ // the owner of the local dummy is the enclosing trait or class.
+ // the local dummy is itself the owner of any local blocks
+ // For example:
+ //
+ // class C {
+ // def foo // owner is C
+ // {
+ // def bar // owner is local dummy
+ // }
+ // System.err.println("TEMPLATE: " + parents)
+ }
+
+ /** Block of expressions (semicolon separated expressions) */
+ case class Block(stats: List[Tree], expr: Tree)
+ extends TermTree
+
+ /** Case clause in a pattern match, eliminated by TransMatch
+ * (except for occurrences in switch statements)
+ */
+ case class CaseDef(pat: Tree, guard: Tree, body: Tree)
+ extends Tree
+
+ /** Alternatives of patterns, eliminated by TransMatch, except for
+ * occurrences in an encoded Switch stmt (= remaining Match(CaseDef(...)))
+ */
+ case class Alternative(trees: List[Tree])
+ extends TermTree
+
+ /** Repetition of pattern, eliminated by TransMatch */
+ case class Star(elem: Tree)
+ extends TermTree
+
+ /** Bind of a variable to a rhs pattern, eliminated by TransMatch
+ *
+ * @param name the name bound by the pattern
+ * @param body the pattern the name is bound to
+ */
+ case class Bind(name: Name, body: Tree)
+ extends DefTree
+
+ case class UnApply(fun: Tree, args: List[Tree])
+ extends TermTree
+
+ /** Array of expressions; needs to be translated by the backend.
+ */
+ case class ArrayValue(elemtpt: Tree, elems: List[Tree])
+ extends TermTree
+
+ /** Anonymous function, eliminated by analyzer */
+ case class Function(vparams: List[ValDef], body: Tree)
+ extends TermTree with SymTree
+ // The symbol of a Function is a synthetic value of name nme.ANON_FUN_NAME
+ // It is the owner of the function's parameters.
+
+ /** Assignment */
+ case class Assign(lhs: Tree, rhs: Tree)
+ extends TermTree
+
+ /** Conditional expression */
+ case class If(cond: Tree, thenp: Tree, elsep: Tree)
+ extends TermTree
+
+ /** <p>
+ * Pattern matching expression (before <code>TransMatch</code>)
+ * Switch statements (after TransMatch)
+ * </p>
+ * <p>
+ * After <code>TransMatch</code>, cases will satisfy the following
+ * constraints:
+ * </p>
+ * <ul>
+ * <li>all guards are EmptyTree,</li>
+ * <li>all patterns will be either <code>Literal(Constant(x:Int))</code>
+ * or <code>Alternative(lit|...|lit)</code></li>
+ * <li>except for an "otherwise" branch, which has pattern
+ * <code>Ident(nme.WILDCARD)</code></li>
+ * </ul>
+ */
+ case class Match(selector: Tree, cases: List[CaseDef])
+ extends TermTree
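+
+ // Editorial sketch (not part of the original source): after TransMatch a
+ // switch-like Match satisfies the constraints above; with body1, body2 and
+ // defaultBody standing for arbitrary branch trees it looks roughly like
+ //   Match(selector, List(
+ //     CaseDef(Literal(Constant(1)), EmptyTree, body1),
+ //     CaseDef(Alternative(List(Literal(Constant(2)), Literal(Constant(3)))), EmptyTree, body2),
+ //     CaseDef(Ident(nme.WILDCARD), EmptyTree, defaultBody)))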
+
+ /** Return expression */
+ case class Return(expr: Tree)
+ extends TermTree with SymTree
+ // The symbol of a Return node is the enclosing method.
+
+ case class Try(block: Tree, catches: List[CaseDef], finalizer: Tree)
+ extends TermTree
+
+ /** Throw expression */
+ case class Throw(expr: Tree)
+ extends TermTree
+
+ /** Object instantiation
+ * One should always use the factory method below to build a user-level new.
+ *
+ * @param tpt a class type
+ */
+ case class New(tpt: Tree) extends TermTree
+
+ /** Type annotation, eliminated by explicit outer */
+ case class Typed(expr: Tree, tpt: Tree)
+ extends TermTree
+
+ // Martin to Sean: Should GenericApply/TypeApply/Apply not be SymTree's? After all,
+ // ApplyDynamic is a SymTree.
+ abstract class GenericApply extends TermTree {
+ val fun: Tree
+ val args: List[Tree]
+ }
+
+ /** Type application */
+ case class TypeApply(fun: Tree, args: List[Tree])
+ extends GenericApply {
+ override def symbol: Symbol = fun.symbol
+ override def symbol_=(sym: Symbol) { fun.symbol = sym }
+ }
+
+ /** Value application */
+ case class Apply(fun: Tree, args: List[Tree])
+ extends GenericApply {
+ override def symbol: Symbol = fun.symbol
+ override def symbol_=(sym: Symbol) { fun.symbol = sym }
+ }
+
+ /** Dynamic value application.
+ * In a dynamic application q.f(as)
+ * - q is stored in qual
+ * - as is stored in args
+ * - f is stored as the node's symbol field.
+ */
+ case class ApplyDynamic(qual: Tree, args: List[Tree])
+ extends TermTree with SymTree
+ // The symbol of an ApplyDynamic is the function symbol of `qual', or NoSymbol, if there is none.
+
+ /** Super reference */
+ case class Super(qual: Name, mix: Name)
+ extends TermTree with SymTree
+ // The symbol of a Super is the class _from_ which the super reference is made.
+ // For instance in C.super(...), it would be C.
+
+ /** Self reference */
+ case class This(qual: Name)
+ extends TermTree with SymTree
+ // The symbol of a This is the class to which the this refers.
+ // For instance in C.this, it would be C.
+
+ /** Designator <qualifier> . <name> */
+ case class Select(qualifier: Tree, name: Name)
+ extends RefTree
+
+ /** Identifier <name> */
+ case class Ident(name: Name)
+ extends RefTree
+
+ class BackQuotedIdent(name: Name) extends Ident(name)
+
+ /** Literal */
+ case class Literal(value: Constant)
+ extends TermTree {
+ assert(value ne null)
+ }
+
+ def Literal(value: Any): Literal =
+ Literal(Constant(value))
+
+ type TypeTree <: AbsTypeTree
+ val TypeTree: TypeTreeExtractor
+
+ abstract class TypeTreeExtractor {
+ def apply(): TypeTree
+ def unapply(tree: TypeTree): Boolean
+ }
+
+ class Traverser {
+ protected var currentOwner: Symbol = definitions.RootClass
+ def traverse(tree: Tree): Unit = tree match {
+ case EmptyTree =>
+ ;
+ case PackageDef(pid, stats) =>
+ traverse(pid)
+ atOwner(tree.symbol.moduleClass) {
+ traverseTrees(stats)
+ }
+ case ClassDef(mods, name, tparams, impl) =>
+ atOwner(tree.symbol) {
+ traverseTrees(mods.annotations); traverseTrees(tparams); traverse(impl)
+ }
+ case ModuleDef(mods, name, impl) =>
+ atOwner(tree.symbol.moduleClass) {
+ traverseTrees(mods.annotations); traverse(impl)
+ }
+ case ValDef(mods, name, tpt, rhs) =>
+ atOwner(tree.symbol) {
+ traverseTrees(mods.annotations); traverse(tpt); traverse(rhs)
+ }
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ atOwner(tree.symbol) {
+ traverseTrees(mods.annotations); traverseTrees(tparams); traverseTreess(vparamss); traverse(tpt); traverse(rhs)
+ }
+ case TypeDef(mods, name, tparams, rhs) =>
+ atOwner(tree.symbol) {
+ traverseTrees(mods.annotations); traverseTrees(tparams); traverse(rhs)
+ }
+ case LabelDef(name, params, rhs) =>
+ traverseTrees(params); traverse(rhs)
+ case Import(expr, selectors) =>
+ traverse(expr)
+ case Annotated(annot, arg) =>
+ traverse(annot); traverse(arg)
+ case Template(parents, self, body) =>
+ traverseTrees(parents)
+ if (!self.isEmpty) traverse(self)
+ traverseStats(body, tree.symbol)
+ case Block(stats, expr) =>
+ traverseTrees(stats); traverse(expr)
+ case CaseDef(pat, guard, body) =>
+ traverse(pat); traverse(guard); traverse(body)
+ case Alternative(trees) =>
+ traverseTrees(trees)
+ case Star(elem) =>
+ traverse(elem)
+ case Bind(name, body) =>
+ traverse(body)
+ case UnApply(fun, args) =>
+ traverse(fun); traverseTrees(args)
+ case ArrayValue(elemtpt, trees) =>
+ traverse(elemtpt); traverseTrees(trees)
+ case Function(vparams, body) =>
+ atOwner(tree.symbol) {
+ traverseTrees(vparams); traverse(body)
+ }
+ case Assign(lhs, rhs) =>
+ traverse(lhs); traverse(rhs)
+ case If(cond, thenp, elsep) =>
+ traverse(cond); traverse(thenp); traverse(elsep)
+ case Match(selector, cases) =>
+ traverse(selector); traverseTrees(cases)
+ case Return(expr) =>
+ traverse(expr)
+ case Try(block, catches, finalizer) =>
+ traverse(block); traverseTrees(catches); traverse(finalizer)
+ case Throw(expr) =>
+ traverse(expr)
+ case New(tpt) =>
+ traverse(tpt)
+ case Typed(expr, tpt) =>
+ traverse(expr); traverse(tpt)
+ case TypeApply(fun, args) =>
+ traverse(fun); traverseTrees(args)
+ case Apply(fun, args) =>
+ traverse(fun); traverseTrees(args)
+ case ApplyDynamic(qual, args) =>
+ traverse(qual); traverseTrees(args)
+ case Super(_, _) =>
+ ;
+ case This(_) =>
+ ;
+ case Select(qualifier, selector) =>
+ traverse(qualifier)
+ case Ident(_) =>
+ ;
+ case Literal(_) =>
+ ;
+ case TypeTree() =>
+ ;
+ case SingletonTypeTree(ref) =>
+ traverse(ref)
+ case SelectFromTypeTree(qualifier, selector) =>
+ traverse(qualifier)
+ case CompoundTypeTree(templ) =>
+ traverse(templ)
+ case AppliedTypeTree(tpt, args) =>
+ traverse(tpt); traverseTrees(args)
+ case TypeBoundsTree(lo, hi) =>
+ traverse(lo); traverse(hi)
+ case ExistentialTypeTree(tpt, whereClauses) =>
+ traverse(tpt); traverseTrees(whereClauses)
+ case SelectFromArray(qualifier, selector, erasure) =>
+ traverse(qualifier)
+ }
+
+ def traverseTrees(trees: List[Tree]) {
+ trees foreach traverse
+ }
+ def traverseTreess(treess: List[List[Tree]]) {
+ treess foreach traverseTrees
+ }
+ def traverseStats(stats: List[Tree], exprOwner: Symbol) {
+ stats foreach (stat =>
+ if (exprOwner != currentOwner) atOwner(exprOwner)(traverse(stat))
+ else traverse(stat)
+ )
+ }
+
+ def atOwner(owner: Symbol)(traverse: => Unit) {
+ val prevOwner = currentOwner
+ currentOwner = owner
+ traverse
+ currentOwner = prevOwner
+ }
+ }
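+
+ // Editorial usage sketch (not part of the original source): a Traverser is
+ // typically subclassed, overriding `traverse` for the cases of interest and
+ // delegating everything else to super.traverse, e.g.
+ //   class IdentCollector extends Traverser {
+ //     val names = new scala.collection.mutable.ListBuffer[Name]
+ //     override def traverse(tree: Tree): Unit = tree match {
+ //       case Ident(name) => names += name
+ //       case _           => super.traverse(tree)
+ //     }
+ //   }
+ //   // new IdentCollector: call traverse(tree), then read names.toList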
+
+ /** A synthetic term holding an arbitrary type. Not to be confused
+ * with TypTree, the trait for trees that are only used for type trees.
+ * TypeTrees are inserted in several places, but most notably in
+ * <code>RefCheck</code>, where the arbitrary type trees are all replaced by
+ * TypeTrees. */
+ abstract class AbsTypeTree extends TypTree {
+ override def symbol = if (tpe == null) null else tpe.typeSymbol
+ override def isEmpty = (tpe eq null) || tpe == NoType
+ }
+
+ /** A tree that has an annotation attached to it. Only used for annotated types and
+ * annotation ascriptions; annotations on definitions are stored in the Modifiers.
+ * Eliminated by the typechecker (typedAnnotated); the annotations are then stored in
+ * an AnnotatedType.
+ */
+ case class Annotated(annot: Tree, arg: Tree) extends Tree
+
+ /** Singleton type, eliminated by RefCheck */
+ case class SingletonTypeTree(ref: Tree)
+ extends TypTree
+
+ /** Type selection <qualifier> # <name>, eliminated by RefCheck */
+ case class SelectFromTypeTree(qualifier: Tree, name: Name)
+ extends TypTree with RefTree
+
+ /** Intersection type <parent1> with ... with <parentN> { <decls> }, eliminated by RefCheck */
+ case class CompoundTypeTree(templ: Template)
+ extends TypTree
+
+ /** Applied type <tpt> [ <args> ], eliminated by RefCheck */
+ case class AppliedTypeTree(tpt: Tree, args: List[Tree])
+ extends TypTree {
+ override def symbol: Symbol = tpt.symbol
+ override def symbol_=(sym: Symbol) { tpt.symbol = sym }
+ }
+
+ case class TypeBoundsTree(lo: Tree, hi: Tree)
+ extends TypTree
+
+ case class ExistentialTypeTree(tpt: Tree, whereClauses: List[Tree])
+ extends TypTree
+
+ /** Array selection <qualifier> . <name> only used during erasure */
+ case class SelectFromArray(qualifier: Tree, name: Name, erasure: Type)
+ extends TermTree with RefTree { }
+
+/* A standard pattern match
+ case EmptyTree =>
+ case PackageDef(pid, stats) =>
+ // package pid { stats }
+ case ClassDef(mods, name, tparams, impl) =>
+ // mods class name [tparams] impl where impl = extends parents { defs }
+ case ModuleDef(mods, name, impl) => (eliminated by refcheck)
+ // mods object name impl where impl = extends parents { defs }
+ case ValDef(mods, name, tpt, rhs) =>
+ // mods val name: tpt = rhs
+ // note missing type information is expressed by tpt = TypeTree()
+ case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ // mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs
+ // note missing type information is expressed by tpt = TypeTree()
+ case TypeDef(mods, name, tparams, rhs) => (eliminated by erasure)
+ // mods type name[tparams] = rhs
+ // mods type name[tparams] >: lo <: hi, where lo, hi are in a TypeBoundsTree,
+ and DEFERRED is set in mods
+ case LabelDef(name, params, rhs) =>
+ // used for tailcalls and like
+ // while/do are desugared to label defs as follows:
+ // while (cond) body ==> LabelDef(L$, List(), if (cond) { body; L$() } else ())
+ // do body while (cond) ==> LabelDef(L$, List(), body; if (cond) L$() else ())
+ case Import(expr, selectors) => (eliminated by typecheck)
+ // import expr.{selectors}
+ // Selectors are a list of pairs of names (from, to).
+ // The last (and maybe only) name may be nme.WILDCARD
+ // for instance
+ // import qual.{x, y => z, _} would be represented as
+ // Import(qual, List(("x", "x"), ("y", "z"), (WILDCARD, null)))
+ case Template(parents, self, body) =>
+ // extends parents { self => body }
+ // if self is missing it is represented as emptyValDef
+ case Block(stats, expr) =>
+ // { stats; expr }
+ case CaseDef(pat, guard, body) => (eliminated by transmatch/explicitouter)
+ // case pat if guard => body
+ case Alternative(trees) => (eliminated by transmatch/explicitouter)
+ // pat1 | ... | patn
+ case Star(elem) => (eliminated by transmatch/explicitouter)
+ // pat*
+ case Bind(name, body) => (eliminated by transmatch/explicitouter)
+ // name @ pat
+ case UnApply(fun: Tree, args) => (introduced by typer, eliminated by transmatch/explicitouter)
+ // used for unapply's
+ case ArrayValue(elemtpt, trees) => (introduced by uncurry)
+ // used to pass arguments to vararg arguments
+ // for instance, printf("%s%d", foo, 42) is translated after uncurry to:
+ // Apply(
+ // Ident("printf"),
+ // Literal("%s%d"),
+ // ArrayValue(<Any>, List(Ident("foo"), Literal(42))))
+ case Function(vparams, body) => (eliminated by lambdaLift)
+ // vparams => body where vparams:List[ValDef]
+ case Assign(lhs, rhs) =>
+ // lhs = rhs
+ case If(cond, thenp, elsep) =>
+ // if (cond) thenp else elsep
+ case Match(selector, cases) =>
+ // selector match { cases }
+ case Return(expr) =>
+ // return expr
+ case Try(block, catches, finalizer) =>
+ // try block catch { catches } finally finalizer where catches: List[CaseDef]
+ case Throw(expr) =>
+ // throw expr
+ case New(tpt) =>
+ // new tpt always in the context: (new tpt).<init>[targs](args)
+ case Typed(expr, tpt) => (eliminated by erasure)
+ // expr: tpt
+ case TypeApply(fun, args) =>
+ // fun[args]
+ case Apply(fun, args) =>
+ // fun(args)
+ // for instance fun[targs](args) is expressed as Apply(TypeApply(fun, targs), args)
+ case ApplyDynamic(qual, args) => (introduced by erasure, eliminated by cleanup)
+ // fun(args)
+ case Super(qual, mix) =>
+ // qual.super[mix]; if qual and/or mix is empty, they are nme.EMPTY.toTypeName
+ case This(qual) =>
+ // qual.this
+ case Select(qualifier, selector) =>
+ // qualifier.selector
+ case Ident(name) =>
+ // name
+ // note: type checker converts idents that refer to enclosing fields or methods
+ // to selects; name ==> this.name
+ case Literal(value) =>
+ // value
+ case TypeTree() => (introduced by refcheck)
+ // a type that's not written out, but given in the tpe attribute
+ case Annotated(annot, arg) => (eliminated by typer)
+ // arg @annot for types, arg: @annot for exprs
+ case SingletonTypeTree(ref) => (eliminated by uncurry)
+ // ref.type
+ case SelectFromTypeTree(qualifier, selector) => (eliminated by uncurry)
+ // qualifier # selector, a path-dependent type p.T is expressed as p.type # T
+ case CompoundTypeTree(templ: Template) => (eliminated by uncurry)
+ // parent1 with ... with parentN { refinement }
+ case AppliedTypeTree(tpt, args) => (eliminated by uncurry)
+ // tpt[args]
+ case TypeBoundsTree(lo, hi) => (eliminated by uncurry)
+ // >: lo <: hi
+ case ExistentialTypeTree(tpt, whereClauses) => (eliminated by uncurry)
+ // tpt forSome { whereClauses }
+
+*/
+}
diff --git a/src/library/scala/reflect/generic/Types.scala b/src/library/scala/reflect/generic/Types.scala
new file mode 100755
index 0000000000..17e19715d7
--- /dev/null
+++ b/src/library/scala/reflect/generic/Types.scala
@@ -0,0 +1,156 @@
+package scala.reflect
+package generic
+
+trait Types { self: Universe =>
+
+ abstract class AbsType {
+ def typeSymbol: Symbol
+ def decl(name: Name): Symbol
+
+ /** Is this type completed (i.e. not a lazy type)?
+ */
+ def isComplete: Boolean = true
+
+ /** If this is a lazy type, assign a new type to `sym'. */
+ def complete(sym: Symbol) {}
+
+ /** Convert this type to a String, avoiding infinite recursion by cutting off
+ * after `maxTostringRecursions` recursion levels. Uses `safeToString`
+ * to produce the string at each level.
+ */
+ override def toString: String =
+ if (tostringRecursions >= maxTostringRecursions)
+ "..."
+ else
+ try {
+ tostringRecursions += 1
+ safeToString
+ } finally {
+ tostringRecursions -= 1
+ }
+
+ /** Method to be implemented in subclasses.
+ * Converts this type to a string by calling toString on its parts.
+ */
+ def safeToString: String = super.toString
+ }
+
+ type Type >: Null <: AbsType
+
+ val NoType: Type
+ val NoPrefix: Type
+
+ type ThisType <: Type
+ val ThisType: ThisTypeExtractor
+
+ type TypeRef <: Type
+ val TypeRef: TypeRefExtractor
+
+ type SingleType <: Type
+ val SingleType: SingleTypeExtractor
+
+ type SuperType <: Type
+ val SuperType: SuperTypeExtractor
+
+ type TypeBounds <: Type
+ val TypeBounds: TypeBoundsExtractor
+
+ type CompoundType <: Type
+
+ type RefinedType <: CompoundType
+ val RefinedType: RefinedTypeExtractor
+
+ type ClassInfoType <: CompoundType
+ val ClassInfoType: ClassInfoTypeExtractor
+
+ type ConstantType <: Type
+ val ConstantType: ConstantTypeExtractor
+
+ type MethodType <: Type
+ val MethodType: MethodTypeExtractor
+
+ type PolyType <: Type
+ val PolyType: PolyTypeExtractor
+
+ type ExistentialType <: Type
+ val ExistentialType: ExistentialTypeExtractor
+
+ type AnnotatedType <: Type
+ val AnnotatedType: AnnotatedTypeExtractor
+
+ type LazyType <: Type with AbsLazyType
+
+ trait AbsLazyType extends AbsType {
+ override def isComplete: Boolean = false
+ override def complete(sym: Symbol)
+ override def safeToString = "<?>"
+ }
+
+ abstract class ThisTypeExtractor {
+ def apply(sym: Symbol): Type
+ def unapply(tpe: ThisType): Option[Symbol]
+ }
+
+ abstract class SingleTypeExtractor {
+ def apply(pre: Type, sym: Symbol): Type
+ def unapply(tpe: SingleType): Option[(Type, Symbol)]
+ }
+
+ abstract class SuperTypeExtractor {
+ def apply(thistpe: Type, supertpe: Type): Type
+ def unapply(tpe: SuperType): Option[(Type, Type)]
+ }
+
+ abstract class TypeRefExtractor {
+ def apply(pre: Type, sym: Symbol, args: List[Type]): Type
+ def unapply(tpe: TypeRef): Option[(Type, Symbol, List[Type])]
+ }
+
+ abstract class TypeBoundsExtractor {
+ def apply(lo: Type, hi: Type): TypeBounds
+ def unapply(tpe: TypeBounds): Option[(Type, Type)]
+ }
+
+ abstract class RefinedTypeExtractor {
+ def apply(parents: List[Type], decls: Scope): RefinedType
+ def apply(parents: List[Type], decls: Scope, clazz: Symbol): RefinedType
+ def unapply(tpe: RefinedType): Option[(List[Type], Scope)]
+ }
+
+ abstract class ClassInfoTypeExtractor {
+ def apply(parents: List[Type], decls: Scope, clazz: Symbol): ClassInfoType
+ def unapply(tpe: ClassInfoType): Option[(List[Type], Scope, Symbol)]
+ }
+
+ abstract class ConstantTypeExtractor {
+ def apply(value: Constant): ConstantType
+ def unapply(tpe: ConstantType): Option[Constant]
+ }
+
+ abstract class MethodTypeExtractor {
+ def apply(params: List[Symbol], resultType: Type): MethodType
+ def unapply(tpe: MethodType): Option[(List[Symbol], Type)]
+ }
+
+ abstract class PolyTypeExtractor {
+ def apply(typeParams: List[Symbol], resultType: Type): PolyType
+ def unapply(tpe: PolyType): Option[(List[Symbol], Type)]
+ }
+
+ abstract class ExistentialTypeExtractor {
+ def apply(quantified: List[Symbol], underlying: Type): ExistentialType
+ def unapply(tpe: ExistentialType): Option[(List[Symbol], Type)]
+ }
+
+ abstract class AnnotatedTypeExtractor {
+ def apply(annotations: List[AnnotationInfo], underlying: Type, selfsym: Symbol): AnnotatedType
+ def unapply(tpe: AnnotatedType): Option[(List[AnnotationInfo], Type, Symbol)]
+ }
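+
+ // Editorial usage sketch (not part of the original source): the extractors
+ // above let client code pattern-match on types without depending on the
+ // concrete implementation classes, e.g.
+ //   def kindOf(tp: Type): String = tp match {
+ //     case TypeRef(pre, sym, args)    => "type reference"
+ //     case MethodType(params, restpe) => "method type"
+ //     case PolyType(tparams, restpe)  => "polymorphic type"
+ //     case _                          => "other"
+ //   }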
+
+ /** The maximum number of recursions allowed in toString
+ */
+ final val maxTostringRecursions = 50
+
+ private var tostringRecursions = 0
+}
+
diff --git a/src/library/scala/reflect/generic/UnPickler.scala b/src/library/scala/reflect/generic/UnPickler.scala
new file mode 100755
index 0000000000..d7eef770cc
--- /dev/null
+++ b/src/library/scala/reflect/generic/UnPickler.scala
@@ -0,0 +1,775 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2010 LAMP/EPFL
+ * @author Martin Odersky
+ */
+// $Id: UnPickler.scala 20716 2010-01-28 14:14:20Z rytz $
+
+package scala.reflect
+package generic
+
+import java.io.IOException
+import java.lang.{Float, Double}
+
+import Flags._
+import PickleFormat._
+import collection.mutable.{HashMap, ListBuffer}
+import annotation.switch
+
+/** @author Martin Odersky
+ * @version 1.0
+ */
+abstract class UnPickler {
+
+ val global: Universe
+ import global._
+
+ /** Unpickle symbol table information descending from a class and/or module root
+ * from an array of bytes.
+ * @param bytes the byte array from which we unpickle
+ * @param offset the offset from which unpickling starts
+ * @param classRoot the top-level class which is unpickled, or NoSymbol if inapplicable
+ * @param moduleRoot the top-level module which is unpickled, or NoSymbol if inapplicable
+ * @param filename the filename associated with the byte array, only used for error messages
+ */
+ def unpickle(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) {
+ try {
+ scan(bytes, offset, classRoot, moduleRoot, filename)
+ } catch {
+ case ex: IOException =>
+ throw ex
+ case ex: Throwable =>
+ /*if (settings.debug.value)*/ ex.printStackTrace()
+ throw new RuntimeException("error reading Scala signature of "+filename+": "+ex.getMessage())
+ }
+ }
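+
+ // Editorial usage sketch (not part of the original source; sigBytes, classSym
+ // and moduleSym are hypothetical): a concrete UnPickler is invoked roughly as
+ //   unpickler.unpickle(sigBytes, 0, classSym, moduleSym, "Foo.class")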
+
+ /** To be implemented in subclasses. Like `unpickle` but without the catch-all error handling.
+ */
+ def scan(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String)
+
+ abstract class Scan(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) extends PickleBuffer(bytes, offset, -1) {
+ //println("unpickle " + classRoot + " and " + moduleRoot)//debug
+
+ protected def debug = false
+
+ checkVersion()
+
+ /** A map from entry numbers to array offsets */
+ private val index = createIndex
+
+ /** A map from entry numbers to symbols, types, or annotations */
+ private val entries = new Array[AnyRef](index.length)
+
+ /** A map from symbols to their associated `decls' scopes */
+ private val symScopes = new HashMap[Symbol, Scope]
+
+ //println("unpickled " + classRoot + ":" + classRoot.rawInfo + ", " + moduleRoot + ":" + moduleRoot.rawInfo);//debug
+
+ def run() {
+ for (i <- 0 until index.length) {
+ if (isSymbolEntry(i))
+ at(i, readSymbol)
+ else if (isSymbolAnnotationEntry(i))
+ at(i, {() => readSymbolAnnotation(); null})
+ else if (isChildrenEntry(i))
+ at(i, {() => readChildren(); null})
+ }
+ }
+
+ private def checkVersion() {
+ val major = readNat()
+ val minor = readNat()
+ if (major != MajorVersion || minor > MinorVersion)
+ throw new IOException("Scala signature " + classRoot.decodedName +
+ " has wrong version\n expected: " +
+ MajorVersion + "." + MinorVersion +
+ "\n found: " + major + "." + minor +
+ " in "+filename)
+ }
+
+ /** The `decls' scope associated with the given symbol */
+ protected def symScope(sym: Symbol) = symScopes.get(sym) match {
+ case None => val s = newScope; symScopes(sym) = s; s
+ case Some(s) => s
+ }
+
+ /** Does entry represent an (internal) symbol */
+ protected def isSymbolEntry(i: Int): Boolean = {
+ val tag = bytes(index(i)).toInt
+ (firstSymTag <= tag && tag <= lastSymTag &&
+ (tag != CLASSsym || !isRefinementSymbolEntry(i)))
+ }
+
+ /** Does entry represent an (internal or external) symbol */
+ protected def isSymbolRef(i: Int): Boolean = {
+ val tag = bytes(index(i))
+ (firstSymTag <= tag && tag <= lastExtSymTag)
+ }
+
+ /** Does entry represent a name? */
+ protected def isNameEntry(i: Int): Boolean = {
+ val tag = bytes(index(i)).toInt
+ tag == TERMname || tag == TYPEname
+ }
+
+ /** Does entry represent a symbol annotation? */
+ protected def isSymbolAnnotationEntry(i: Int): Boolean = {
+ val tag = bytes(index(i)).toInt
+ tag == SYMANNOT
+ }
+
+ /** Does the entry represent children of a symbol? */
+ protected def isChildrenEntry(i: Int): Boolean = {
+ val tag = bytes(index(i)).toInt
+ tag == CHILDREN
+ }
+
+ /** Does entry represent a refinement symbol?
+ * pre: Entry is a class symbol
+ */
+ protected def isRefinementSymbolEntry(i: Int): Boolean = {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ val tag = readByte().toInt
+ assert(tag == CLASSsym)
+
+ readNat(); // read length
+ val result = readNameRef() == mkTypeName(nme.REFINE_CLASS_NAME)
+ readIndex = savedIndex
+ result
+ }
+
+ /** If entry at <code>i</code> is undefined, define it by performing
+ * operation <code>op</code> with <code>readIndex</code> at start of i'th
+ * entry. Restore <code>readIndex</code> afterwards.
+ */
+ protected def at[T <: AnyRef](i: Int, op: () => T): T = {
+ var r = entries(i)
+ if (r eq null) {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ r = op()
+ assert(entries(i) eq null, entries(i))
+ entries(i) = r
+ readIndex = savedIndex
+ }
+ r.asInstanceOf[T]
+ }
+
+ /** Read a name */
+ protected def readName(): Name = {
+ val tag = readByte()
+ val len = readNat()
+ tag match {
+ case TERMname => newTermName(bytes, readIndex, len)
+ case TYPEname => newTypeName(bytes, readIndex, len)
+ case _ => errorBadSignature("bad name tag: " + tag)
+ }
+ }
+
+ /** Read a symbol */
+ protected def readSymbol(): Symbol = {
+ val tag = readByte()
+ val end = readNat() + readIndex
+ var sym: Symbol = NoSymbol
+ tag match {
+ case EXTref | EXTMODCLASSref =>
+ val name = readNameRef()
+ val owner = if (readIndex == end) definitions.RootClass else readSymbolRef()
+ def fromName(name: Name) =
+ if (mkTermName(name) == nme.ROOT) definitions.RootClass
+ else if (name == nme.ROOTPKG) definitions.RootPackage
+ else if (tag == EXTref) owner.info.decl(name)
+ else owner.info.decl(name).moduleClass
+ sym = fromName(name)
+ // If sym not found try with expanded name.
+ // This can happen if references to private symbols are
+ // read from outside; for instance when checking the children of a class
+ // (see t1722)
+ if (sym == NoSymbol) sym = fromName(nme.expandedName(name, owner))
+
+ // If the owner is overloaded (i.e. a method), it's not possible to select the
+ // right member => return NoSymbol. This can only happen when unpickling a tree.
+ // the "case Apply" in readTree() takes care of selecting the correct alternative
+ // after parsing the arguments.
+ if (sym == NoSymbol && !owner.isOverloaded) errorMissingRequirement(name, owner)
+
+ case NONEsym =>
+ sym = NoSymbol
+
+ case _ => // symbols that were pickled with Pickler.writeSymInfo
+ var defaultGetter: Symbol = NoSymbol // @deprecated, to be removed for 2.8 final
+ var nameref = readNat()
+ if (tag == VALsym && isSymbolRef(nameref)) { // @deprecated, to be removed for 2.8 final
+ defaultGetter = at(nameref, readSymbol)
+ nameref = readNat()
+ }
+ val name = at(nameref, readName)
+ val owner = readSymbolRef()
+ val flags = pickledToRawFlags(readLongNat())
+ var privateWithin: Symbol = NoSymbol
+ var inforef = readNat()
+ if (isSymbolRef(inforef)) {
+ privateWithin = at(inforef, readSymbol)
+ inforef = readNat()
+ }
+ tag match {
+ case TYPEsym =>
+ sym = owner.newAbstractType(name)
+ case ALIASsym =>
+ sym = owner.newAliasType(name)
+ case CLASSsym =>
+ sym =
+ if (name == classRoot.name && owner == classRoot.owner)
+ (if ((flags & MODULE) != 0L) moduleRoot.moduleClass
+ else classRoot)
+ else
+ if ((flags & MODULE) != 0L) owner.newModuleClass(name)
+ else owner.newClass(name)
+ if (readIndex != end) sym.typeOfThis = newLazyTypeRef(readNat())
+ case MODULEsym =>
+ val clazz = at(inforef, readType).typeSymbol
+ sym =
+ if (name == moduleRoot.name && owner == moduleRoot.owner) moduleRoot
+ else {
+ val m = owner.newModule(name, clazz)
+ clazz.sourceModule = m
+ m
+ }
+ case VALsym =>
+ sym = if (name == moduleRoot.name && owner == moduleRoot.owner) { assert(false); NoSymbol }
+ else if ((flags & METHOD) != 0) owner.newMethod(name)
+ else owner.newValue(name)
+ case _ =>
+ noSuchSymbolTag(tag, end, name, owner)
+ }
+ sym.flags = flags & PickledFlags
+ sym.privateWithin = privateWithin
+ if (readIndex != end) assert(sym hasFlag (SUPERACCESSOR | PARAMACCESSOR), sym)
+ if (sym hasFlag SUPERACCESSOR) assert(readIndex != end)
+ sym.info =
+ if (readIndex != end) newLazyTypeRefAndAlias(inforef, readNat())
+ else newLazyTypeRef(inforef)
+ if (sym.owner.isClass && sym != classRoot && sym != moduleRoot &&
+ !sym.isModuleClass && !sym.isRefinementClass && !sym.isTypeParameter && !sym.isExistentiallyBound)
+ symScope(sym.owner) enter sym
+ }
+ sym
+ }
+
+ def noSuchSymbolTag(tag: Int, end: Int, name: Name, owner: Symbol) =
+ errorBadSignature("bad symbol tag: " + tag)
+
+ /** Read a type */
+ protected def readType(): Type = {
+ val tag = readByte()
+ val end = readNat() + readIndex
+ (tag: @switch) match {
+ case NOtpe =>
+ NoType
+ case NOPREFIXtpe =>
+ NoPrefix
+ case THIStpe =>
+ ThisType(readSymbolRef())
+ case SINGLEtpe =>
+ SingleType(readTypeRef(), readSymbolRef()) // !!! was singleType
+ case SUPERtpe =>
+ val thistpe = readTypeRef()
+ val supertpe = readTypeRef()
+ SuperType(thistpe, supertpe)
+ case CONSTANTtpe =>
+ ConstantType(readConstantRef())
+ case TYPEREFtpe =>
+ val pre = readTypeRef()
+ val sym = readSymbolRef()
+ var args = until(end, readTypeRef)
+ TypeRef(pre, sym, args)
+ case TYPEBOUNDStpe =>
+ TypeBounds(readTypeRef(), readTypeRef())
+ case REFINEDtpe =>
+ val clazz = readSymbolRef()
+ RefinedType(until(end, readTypeRef), symScope(clazz), clazz)
+ case CLASSINFOtpe =>
+ val clazz = readSymbolRef()
+ ClassInfoType(until(end, readTypeRef), symScope(clazz), clazz)
+ case METHODtpe | IMPLICITMETHODtpe =>
+ val restpe = readTypeRef()
+ val params = until(end, readSymbolRef)
+ // if the method is overloaded, the params cannot be determined (see readSymbol) => return NoType.
+ // This can only happen for trees; "case Apply" in readTree() takes care of selecting the correct
+ // alternative after parsing the arguments.
+ if (params.contains(NoSymbol) || restpe == NoType) NoType
+ else MethodType(params, restpe)
+ case POLYtpe =>
+ val restpe = readTypeRef()
+ val typeParams = until(end, readSymbolRef)
+ PolyType(typeParams, restpe)
+ case EXISTENTIALtpe =>
+ val restpe = readTypeRef()
+ ExistentialType(until(end, readSymbolRef), restpe)
+ case ANNOTATEDtpe =>
+ var typeRef = readNat()
+ val selfsym = if (isSymbolRef(typeRef)) {
+ val s = at(typeRef, readSymbol)
+ typeRef = readNat()
+ s
+ } else NoSymbol // selfsym can go.
+ val tp = at(typeRef, readType)
+ val annots = until(end, readAnnotationRef)
+ if (selfsym == NoSymbol) AnnotatedType(annots, tp, selfsym)
+ else tp
+ case _ =>
+ noSuchTypeTag(tag, end)
+ }
+ }
+
+ def noSuchTypeTag(tag: Int, end: Int): Type =
+ errorBadSignature("bad type tag: " + tag)
+
+ /** Read a constant */
+ protected def readConstant(): Constant = {
+ val tag = readByte().toInt
+ val len = readNat()
+ (tag: @switch) match {
+ case LITERALunit => Constant(())
+ case LITERALboolean => Constant(readLong(len) != 0L)
+ case LITERALbyte => Constant(readLong(len).toByte)
+ case LITERALshort => Constant(readLong(len).toShort)
+ case LITERALchar => Constant(readLong(len).toChar)
+ case LITERALint => Constant(readLong(len).toInt)
+ case LITERALlong => Constant(readLong(len))
+ case LITERALfloat => Constant(Float.intBitsToFloat(readLong(len).toInt))
+ case LITERALdouble => Constant(Double.longBitsToDouble(readLong(len)))
+ case LITERALstring => Constant(readNameRef().toString())
+ case LITERALnull => Constant(null)
+ case LITERALclass => Constant(readTypeRef())
+ case LITERALenum => Constant(readSymbolRef())
+ case _ => noSuchConstantTag(tag, len)
+ }
+ }
+
+ def noSuchConstantTag(tag: Int, len: Int): Constant =
+ errorBadSignature("bad constant tag: " + tag)
+
+ /** Read children and store them into the corresponding symbol.
+ */
+ protected def readChildren() {
+ val tag = readByte()
+ assert(tag == CHILDREN)
+ val end = readNat() + readIndex
+ val target = readSymbolRef()
+ while (readIndex != end) target addChild readSymbolRef()
+ }
+
+ /** Read an annotation argument, which is pickled either
+ * as a Constant or a Tree.
+ */
+ protected def readAnnotArg(i: Int): Tree = {
+ if (bytes(index(i)) == TREE) {
+ at(i, readTree)
+ } else {
+ val const = at(i, readConstant)
+ global.Literal(const).setType(const.tpe)
+ }
+ }
+
+ /** Read a ClassfileAnnotArg (argument to a classfile annotation)
+ */
+ protected def readClassfileAnnotArg(i: Int): ClassfileAnnotArg = bytes(index(i)) match {
+ case ANNOTINFO =>
+ NestedAnnotArg(at(i, readAnnotation))
+ case ANNOTARGARRAY =>
+ at(i, () => {
+ readByte() // skip the `annotargarray` tag
+ val end = readNat() + readIndex
+ ArrayAnnotArg(until(end, () => readClassfileAnnotArg(readNat())).toArray(classfileAnnotArgManifest))
+ })
+ case _ =>
+ LiteralAnnotArg(at(i, readConstant))
+ }
+
+ /** Read an AnnotationInfo. Not to be called directly, use
+ * readAnnotation or readSymbolAnnotation
+ */
+ protected def readAnnotationInfo(end: Int): AnnotationInfo = {
+ val atp = readTypeRef()
+ val args = new ListBuffer[Tree]
+ val assocs = new ListBuffer[(Name, ClassfileAnnotArg)]
+ while (readIndex != end) {
+ val argref = readNat()
+ if (isNameEntry(argref)) {
+ val name = at(argref, readName)
+ val arg = readClassfileAnnotArg(readNat())
+ assocs += ((name, arg))
+ }
+ else
+ args += readAnnotArg(argref)
+ }
+ AnnotationInfo(atp, args.toList, assocs.toList)
+ }
+
+ /** Read an annotation and as a side effect store it into
+ * the symbol it requests. Called at top-level, for all
+ * (symbol, annotInfo) entries. */
+ protected def readSymbolAnnotation() {
+ val tag = readByte()
+ if (tag != SYMANNOT)
+ errorBadSignature("symbol annotation expected ("+ tag +")")
+ val end = readNat() + readIndex
+ val target = readSymbolRef()
+ target.addAnnotation(readAnnotationInfo(end))
+ }
+
+ /** Read an annotation and return it. Used when unpickling
+ * an ANNOTATED(WSELF)tpe or a NestedAnnotArg */
+ protected def readAnnotation(): AnnotationInfo = {
+ val tag = readByte()
+ if (tag != ANNOTINFO)
+ errorBadSignature("annotation expected (" + tag + ")")
+ val end = readNat() + readIndex
+ readAnnotationInfo(end)
+ }
+
+ /* Read an abstract syntax tree */
+ protected def readTree(): Tree = {
+ val outerTag = readByte()
+ if (outerTag != TREE)
+ errorBadSignature("tree expected (" + outerTag + ")")
+ val end = readNat() + readIndex
+ val tag = readByte()
+ val tpe = if (tag == EMPTYtree) NoType else readTypeRef()
+
+ // Set by the three functions to follow. If symbol is non-null
+ // after the new tree 't' has been created, t has its Symbol
+ // set to symbol; and it always has its Type set to tpe.
+ var symbol: Symbol = null
+ var mods: Modifiers = null
+ var name: Name = null
+
+ /** Read a Symbol, Modifiers, and a Name */
+ def setSymModsName() {
+ symbol = readSymbolRef()
+ mods = readModifiersRef()
+ name = readNameRef()
+ }
+ /** Read a Symbol and a Name */
+ def setSymName() {
+ symbol = readSymbolRef()
+ name = readNameRef()
+ }
+ /** Read a Symbol */
+ def setSym() {
+ symbol = readSymbolRef()
+ }
+
+ val t = tag match {
+ case EMPTYtree =>
+ EmptyTree
+
+ case PACKAGEtree =>
+ setSym()
+ // val discardedSymbol = readSymbolRef() // XXX is symbol intentionally not set?
+ val pid = readTreeRef().asInstanceOf[RefTree]
+ val stats = until(end, readTreeRef)
+ PackageDef(pid, stats)
+
+ case CLASStree =>
+ setSymModsName()
+ val impl = readTemplateRef()
+ val tparams = until(end, readTypeDefRef)
+ ClassDef(mods, name, tparams, impl)
+
+ case MODULEtree =>
+ setSymModsName()
+ ModuleDef(mods, name, readTemplateRef())
+
+ case VALDEFtree =>
+ setSymModsName()
+ val tpt = readTreeRef()
+ val rhs = readTreeRef()
+ ValDef(mods, name, tpt, rhs)
+
+ case DEFDEFtree =>
+ setSymModsName()
+ val tparams = times(readNat(), readTypeDefRef)
+ val vparamss = times(readNat(), () => times(readNat(), readValDefRef))
+ val tpt = readTreeRef()
+ val rhs = readTreeRef()
+
+ DefDef(mods, name, tparams, vparamss, tpt, rhs)
+
+ case TYPEDEFtree =>
+ setSymModsName()
+ val rhs = readTreeRef()
+ val tparams = until(end, readTypeDefRef)
+ TypeDef(mods, name, tparams, rhs)
+
+ case LABELtree =>
+ setSymName()
+ val rhs = readTreeRef()
+ val params = until(end, readIdentRef)
+ LabelDef(name, params, rhs)
+
+ case IMPORTtree =>
+ setSym()
+ val expr = readTreeRef()
+ val selectors = until(end, () => {
+ val from = readNameRef()
+ val to = readNameRef()
+ ImportSelector(from, -1, to, -1)
+ })
+
+ Import(expr, selectors)
+
+ case TEMPLATEtree =>
+ setSym()
+ val parents = times(readNat(), readTreeRef)
+ val self = readValDefRef()
+ val body = until(end, readTreeRef)
+
+ Template(parents, self, body)
+
+ case BLOCKtree =>
+ val expr = readTreeRef()
+ val stats = until(end, readTreeRef)
+ Block(stats, expr)
+
+ case CASEtree =>
+ val pat = readTreeRef()
+ val guard = readTreeRef()
+ val body = readTreeRef()
+ CaseDef(pat, guard, body)
+
+ case ALTERNATIVEtree =>
+ Alternative(until(end, readTreeRef))
+
+ case STARtree =>
+ Star(readTreeRef())
+
+ case BINDtree =>
+ setSymName()
+ Bind(name, readTreeRef())
+
+ case UNAPPLYtree =>
+ val fun = readTreeRef()
+ val args = until(end, readTreeRef)
+ UnApply(fun, args)
+
+ case ARRAYVALUEtree =>
+ val elemtpt = readTreeRef()
+ val trees = until(end, readTreeRef)
+ ArrayValue(elemtpt, trees)
+
+ case FUNCTIONtree =>
+ setSym()
+ val body = readTreeRef()
+ val vparams = until(end, readValDefRef)
+ Function(vparams, body)
+
+ case ASSIGNtree =>
+ val lhs = readTreeRef()
+ val rhs = readTreeRef()
+ Assign(lhs, rhs)
+
+ case IFtree =>
+ val cond = readTreeRef()
+ val thenp = readTreeRef()
+ val elsep = readTreeRef()
+ If(cond, thenp, elsep)
+
+ case MATCHtree =>
+ val selector = readTreeRef()
+ val cases = until(end, readCaseDefRef)
+ Match(selector, cases)
+
+ case RETURNtree =>
+ setSym()
+ Return(readTreeRef())
+
+ case TREtree =>
+ val block = readTreeRef()
+ val finalizer = readTreeRef()
+ val catches = until(end, readCaseDefRef)
+ Try(block, catches, finalizer)
+
+ case THROWtree =>
+ Throw(readTreeRef())
+
+ case NEWtree =>
+ New(readTreeRef())
+
+ case TYPEDtree =>
+ val expr = readTreeRef()
+ val tpt = readTreeRef()
+ Typed(expr, tpt)
+
+ case TYPEAPPLYtree =>
+ val fun = readTreeRef()
+ val args = until(end, readTreeRef)
+ TypeApply(fun, args)
+
+ case APPLYtree =>
+ val fun = readTreeRef()
+ val args = until(end, readTreeRef)
+ if (fun.symbol.isOverloaded) {
+ fun.setType(fun.symbol.info)
+ inferMethodAlternative(fun, args map (_.tpe), tpe)
+ }
+ Apply(fun, args)
+
+ case APPLYDYNAMICtree =>
+ setSym()
+ val qual = readTreeRef()
+ val args = until(end, readTreeRef)
+ ApplyDynamic(qual, args)
+
+ case SUPERtree =>
+ setSym()
+ val qual = readNameRef()
+ val mix = readNameRef()
+ Super(qual, mix)
+
+ case THIStree =>
+ setSym()
+ This(readNameRef())
+
+ case SELECTtree =>
+ setSym()
+ val qualifier = readTreeRef()
+ val selector = readNameRef()
+ Select(qualifier, selector)
+
+ case IDENTtree =>
+ setSymName()
+ Ident(name)
+
+ case LITERALtree =>
+ global.Literal(readConstantRef())
+
+ case TYPEtree =>
+ TypeTree()
+
+ case ANNOTATEDtree =>
+ val annot = readTreeRef()
+ val arg = readTreeRef()
+ Annotated(annot, arg)
+
+ case SINGLETONTYPEtree =>
+ SingletonTypeTree(readTreeRef())
+
+ case SELECTFROMTYPEtree =>
+ val qualifier = readTreeRef()
+ val selector = readNameRef()
+ SelectFromTypeTree(qualifier, selector)
+
+ case COMPOUNDTYPEtree =>
+ CompoundTypeTree(readTemplateRef())
+
+ case APPLIEDTYPEtree =>
+ val tpt = readTreeRef()
+ val args = until(end, readTreeRef)
+ AppliedTypeTree(tpt, args)
+
+ case TYPEBOUNDStree =>
+ val lo = readTreeRef()
+ val hi = readTreeRef()
+ TypeBoundsTree(lo, hi)
+
+ case EXISTENTIALTYPEtree =>
+ val tpt = readTreeRef()
+ val whereClauses = until(end, readTreeRef)
+ ExistentialTypeTree(tpt, whereClauses)
+
+ case _ =>
+ noSuchTreeTag(tag, end)
+ }
+
+ if (symbol == null) t setType tpe
+ else t setSymbol symbol setType tpe
+ }
+
+ def noSuchTreeTag(tag: Int, end: Int) =
+ errorBadSignature("unknown tree type (" + tag + ")")
+
+ def readModifiers(): Modifiers = {
+ val tag = readNat()
+ if (tag != MODIFIERS)
+ errorBadSignature("expected a modifiers tag (" + tag + ")")
+ val end = readNat() + readIndex
+ val pflagsHi = readNat()
+ val pflagsLo = readNat()
+ val pflags = (pflagsHi.toLong << 32) + pflagsLo
+ val flags = pickledToRawFlags(pflags)
+ val privateWithin = readNameRef()
+ Modifiers(flags, privateWithin, Nil, Map.empty)
+ }
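+
+ // Editorial note (illustrative arithmetic, not part of the original source):
+ // the pickled flags are stored as two 32-bit naturals and reassembled above,
+ // e.g. pflagsHi = 0x1 and pflagsLo = 0x4 give (0x1L << 32) + 0x4 = 0x100000004L,
+ // which is then mapped through pickledToRawFlags.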
+
+ /* Read a reference to a pickled item */
+ protected def readNameRef(): Name = at(readNat(), readName)
+ protected def readSymbolRef(): Symbol = at(readNat(), readSymbol)
+ protected def readTypeRef(): Type = at(readNat(), readType)
+ protected def readConstantRef(): Constant = at(readNat(), readConstant)
+ protected def readAnnotationRef(): AnnotationInfo =
+ at(readNat(), readAnnotation)
+ protected def readModifiersRef(): Modifiers =
+ at(readNat(), readModifiers)
+ protected def readTreeRef(): Tree =
+ at(readNat(), readTree)
+
+ protected def readTemplateRef(): Template =
+ readTreeRef() match {
+ case templ:Template => templ
+ case other =>
+ errorBadSignature("expected a template (" + other + ")")
+ }
+ protected def readCaseDefRef(): CaseDef =
+ readTreeRef() match {
+ case tree:CaseDef => tree
+ case other =>
+ errorBadSignature("expected a case def (" + other + ")")
+ }
+ protected def readValDefRef(): ValDef =
+ readTreeRef() match {
+ case tree:ValDef => tree
+ case other =>
+ errorBadSignature("expected a ValDef (" + other + ")")
+ }
+ protected def readIdentRef(): Ident =
+ readTreeRef() match {
+ case tree:Ident => tree
+ case other =>
+ errorBadSignature("expected an Ident (" + other + ")")
+ }
+ protected def readTypeDefRef(): TypeDef =
+ readTreeRef() match {
+ case tree:TypeDef => tree
+ case other =>
+ errorBadSignature("expected an TypeDef (" + other + ")")
+ }
+
+ protected def errorBadSignature(msg: String) =
+ throw new RuntimeException("malformed Scala signature of " + classRoot.name + " at " + readIndex + "; " + msg)
+
+ protected def errorMissingRequirement(msg: String): Nothing =
+ if (debug) errorBadSignature(msg)
+ else throw new IOException("class file needed by "+classRoot.name+" is missing.\n"+msg)
+
+ protected def errorMissingRequirement(name: Name, owner: Symbol): Nothing =
+ errorMissingRequirement("reference " + NameTransformer.decode(name.toString) + " of " + owner.tpe + " refers to nonexisting symbol.")
+
+ /** pre: `fun` points to a symbol with an overloaded type.
+ * Selects the overloaded alternative of `fun` which best matches given
+ * argument types `argtpes` and result type `restpe`. Stores this alternative as
+ * the symbol of `fun`.
+ */
+ def inferMethodAlternative(fun: Tree, argtpes: List[Type], restpe: Type)
+
+ /** Create a lazy type which when completed returns type at index `i`. */
+ def newLazyTypeRef(i: Int): LazyType
+
+ /** Create a lazy type which when completed returns type at index `i` and sets alias
+ * of completed symbol to symbol at index `j`
+ */
+ def newLazyTypeRefAndAlias(i: Int, j: Int): LazyType
+ }
+}
diff --git a/src/library/scala/reflect/generic/Universe.scala b/src/library/scala/reflect/generic/Universe.scala
new file mode 100755
index 0000000000..101295ae79
--- /dev/null
+++ b/src/library/scala/reflect/generic/Universe.scala
@@ -0,0 +1,16 @@
+package scala.reflect
+package generic
+
+abstract class Universe extends Symbols
+ with Types
+ with Constants
+ with Scopes
+ with Names
+ with StdNames
+ with Trees
+ with AnnotationInfos
+ with StandardDefinitions {
+ type Position
+ val NoPosition: Position
+}
+
diff --git a/src/library/scala/runtime/BoxesRunTime.java b/src/library/scala/runtime/BoxesRunTime.java
index 087331e1c5..7c27835b5a 100644
--- a/src/library/scala/runtime/BoxesRunTime.java
+++ b/src/library/scala/runtime/BoxesRunTime.java
@@ -28,7 +28,7 @@ import scala.math.ScalaNumber;
* @author Martin Odersky
* @contributor Stepan Koltsov
* @version 2.0 */
-public class BoxesRunTime
+public final class BoxesRunTime
{
private static final int CHAR = 0, BYTE = 1, SHORT = 2, INT = 3, LONG = 4, FLOAT = 5, DOUBLE = 6, OTHER = 7;
@@ -136,38 +136,51 @@ public class BoxesRunTime
* in any case, we dispatch to it as soon as we spot one on either side.
*/
public static boolean equals2(Object x, Object y) {
- if (x instanceof Number) {
- Number xn = (Number)x;
-
- if (y instanceof Number) {
- Number yn = (Number)y;
- int xcode = eqTypeCode(xn);
- int ycode = eqTypeCode(yn);
- switch (ycode > xcode ? ycode : xcode) {
- case INT:
- return xn.intValue() == yn.intValue();
- case LONG:
- return xn.longValue() == yn.longValue();
- case FLOAT:
- return xn.floatValue() == yn.floatValue();
- case DOUBLE:
- return xn.doubleValue() == yn.doubleValue();
- default:
- if ((yn instanceof ScalaNumber) && !(xn instanceof ScalaNumber))
- return y.equals(x);
- }
- } else if (y instanceof Character)
- return equalsNumChar(xn, (Character)y);
- } else if (x instanceof Character) {
- Character xc = (Character)x;
- if (y instanceof Character)
- return xc.charValue() == ((Character)y).charValue();
- if (y instanceof Number)
- return equalsNumChar((Number)y, xc);
- }
+ if (x instanceof Number)
+ return equalsNumObject((Number)x, y);
+ if (x instanceof Character)
+ return equalsCharObject((Character)x, y);
+
return x.equals(y);
}
+ public static boolean equalsNumObject(Number xn, Object y) {
+ if (y instanceof Number)
+ return equalsNumNum(xn, (Number)y);
+ else if (y instanceof Character)
+ return equalsNumChar(xn, (Character)y);
+
+ return xn.equals(y);
+ }
+
+ public static boolean equalsNumNum(Number xn, Number yn) {
+ int xcode = eqTypeCode(xn);
+ int ycode = eqTypeCode(yn);
+ switch (ycode > xcode ? ycode : xcode) {
+ case INT:
+ return xn.intValue() == yn.intValue();
+ case LONG:
+ return xn.longValue() == yn.longValue();
+ case FLOAT:
+ return xn.floatValue() == yn.floatValue();
+ case DOUBLE:
+ return xn.doubleValue() == yn.doubleValue();
+ default:
+ if ((yn instanceof ScalaNumber) && !(xn instanceof ScalaNumber))
+ return yn.equals(xn);
+ }
+ return xn.equals(yn);
+ }
+
+ public static boolean equalsCharObject(Character xc, Object y) {
+ if (y instanceof Character)
+ return xc.charValue() == ((Character)y).charValue();
+ if (y instanceof Number)
+ return equalsNumChar((Number)y, xc);
+
+ return xc.equals(y);
+ }
+
private static boolean equalsNumChar(Number xn, Character yc) {
char ch = yc.charValue();
switch (eqTypeCode(xn)) {
@@ -212,27 +225,27 @@ public class BoxesRunTime
* versions are equal. This still needs reconciliation.
*/
public static int hashFromLong(Long n) {
- int iv = n.intValue();
- if (iv == n.longValue()) return iv;
- else return n.hashCode();
+ int iv = n.intValue();
+ if (iv == n.longValue()) return iv;
+ else return n.hashCode();
}
public static int hashFromDouble(Double n) {
- int iv = n.intValue();
- double dv = n.doubleValue();
- if (iv == dv) return iv;
+ int iv = n.intValue();
+ double dv = n.doubleValue();
+ if (iv == dv) return iv;
- long lv = n.longValue();
- if (lv == dv) return Long.valueOf(lv).hashCode();
- else return n.hashCode();
+ long lv = n.longValue();
+ if (lv == dv) return Long.valueOf(lv).hashCode();
+ else return n.hashCode();
}
public static int hashFromFloat(Float n) {
- int iv = n.intValue();
- float fv = n.floatValue();
- if (iv == fv) return iv;
+ int iv = n.intValue();
+ float fv = n.floatValue();
+ if (iv == fv) return iv;
- long lv = n.longValue();
- if (lv == fv) return Long.valueOf(lv).hashCode();
- else return n.hashCode();
+ long lv = n.longValue();
+ if (lv == fv) return Long.valueOf(lv).hashCode();
+ else return n.hashCode();
}
public static int hashFromNumber(Number n) {
if (n instanceof Long) return hashFromLong((Long)n);
diff --git a/src/library/scala/runtime/NonLocalReturnControl.scala b/src/library/scala/runtime/NonLocalReturnControl.scala
new file mode 100644
index 0000000000..5591d4871b
--- /dev/null
+++ b/src/library/scala/runtime/NonLocalReturnControl.scala
@@ -0,0 +1,16 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+// $Id$
+
+
+package scala.runtime
+
+import scala.util.control.ControlThrowable
+
+class NonLocalReturnControl[T](val key: AnyRef, val value: T) extends ControlThrowable
diff --git a/src/library/scala/runtime/NonLocalReturnException.scala b/src/library/scala/runtime/NonLocalReturnException.scala
index 4bd8ceb058..19a216be7c 100644
--- a/src/library/scala/runtime/NonLocalReturnException.scala
+++ b/src/library/scala/runtime/NonLocalReturnException.scala
@@ -11,6 +11,9 @@
package scala.runtime
-import scala.util.control.ControlException
+import scala.util.control.ControlThrowable
-class NonLocalReturnException[T](val key: AnyRef, val value: T) extends RuntimeException with ControlException
+/** !!! This class has been replaced by NonLocalReturnControl and should be deleted.
+ * But it can't be deleted until starr is updated to use the new name.
+ */
+class NonLocalReturnException[T](val key: AnyRef, val value: T) extends ControlThrowable
diff --git a/src/library/scala/runtime/RichChar.scala b/src/library/scala/runtime/RichChar.scala
index 63fbe20f3c..f5e2625fd8 100644
--- a/src/library/scala/runtime/RichChar.scala
+++ b/src/library/scala/runtime/RichChar.scala
@@ -12,7 +12,7 @@
package scala.runtime
import java.lang.Character
-import collection.{IndexedSeq, IndexedSeqView}
+import collection.immutable.NumericRange
/** <p>
* For example, in the following code
@@ -82,22 +82,14 @@ final class RichChar(x: Char) extends Proxy with Ordered[Char] {
@deprecated("Use ch.isUpper instead")
def isUpperCase: Boolean = isUpper
- /** Create a <code>[Char]</code> over the characters from 'x' to 'y' - 1
+ /** Create a <code>NumericRange[Char]</code> over the characters from 'x' to 'limit' - 1
*/
- def until(limit: Char): IndexedSeqView[Char, IndexedSeq[Char]] =
- if (limit <= x) IndexedSeq.empty.view
- else
- new IndexedSeqView[Char, IndexedSeq[Char]] {
- protected def underlying = IndexedSeq.empty[Char]
- def length = limit - x
- def apply(i: Int): Char = {
- require(i >= 0 && i < length)
- (x + i).toChar
- }
- }
-
- /** Create a <code>IndexedSeqView[Char]</code> over the characters from 'x' to 'y'
+ def until(limit: Char): NumericRange[Char] =
+ new NumericRange.Exclusive(x, limit, 1.toChar)
+
+ /** Create a <code>NumericRange[Char]</code> over the characters from 'x' to 'limit'
*/
- def to(y: Char): IndexedSeqView[Char, IndexedSeq[Char]] = until((y + 1).toChar)
+ def to(limit: Char): NumericRange[Char] =
+ new NumericRange.Inclusive(x, limit, 1.toChar)
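+
+ // Editorial sketch (not part of this patch):
+ //   ('a' until 'd').toList == List('a', 'b', 'c') // exclusive of the limit
+ //   ('a' to 'c').toList == List('a', 'b', 'c') // inclusive of the limit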
}
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index 2f6ffb5535..dffebfc892 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -12,10 +12,11 @@
package scala.runtime
import scala.reflect.ClassManifest
-import scala.collection.Seq
-import scala.collection.mutable._
+import scala.collection.{ Seq, IndexedSeq, TraversableView }
+import scala.collection.mutable.WrappedArray
import scala.collection.immutable.{ List, Stream, Nil, :: }
-import scala.util.control.ControlException
+import scala.xml.{ Node, MetaData }
+import scala.util.control.ControlThrowable
/* The object <code>ScalaRunTime</code> provides ...
*/
@@ -89,7 +90,7 @@ object ScalaRunTime {
}
/** Convert a numeric value array to an object array.
- * Needed to deal with vararg arguments of primtive types that are passed
+ * Needed to deal with vararg arguments of primitive types that are passed
* to a generic Java vararg parameter T ...
*/
def toObjectArray(src: AnyRef): Array[Object] = {
@@ -123,7 +124,7 @@ object ScalaRunTime {
private var exception: Throwable =
try { run() ; null }
catch {
- case e: ControlException => throw e // don't catch non-local returns etc
+ case e: ControlThrowable => throw e // don't catch non-local returns etc
case e: Throwable => e
}
@@ -165,7 +166,8 @@ object ScalaRunTime {
@inline def inlinedEquals(x: Object, y: Object): Boolean =
if (x eq y) true
else if (x eq null) false
- else if (x.isInstanceOf[java.lang.Number] || x.isInstanceOf[java.lang.Character]) BoxesRunTime.equals2(x, y)
+ else if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.equalsNumObject(x.asInstanceOf[java.lang.Number], y)
+ else if (x.isInstanceOf[java.lang.Character]) BoxesRunTime.equalsCharObject(x.asInstanceOf[java.lang.Character], y)
else x.equals(y)
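+
+ // Editorial sketch (not part of this patch): the split dispatch preserves
+ // cooperative numeric/char equality, e.g.
+ //   inlinedEquals(Integer.valueOf(1), java.lang.Long.valueOf(1L)) // true, via equalsNumObject
+ //   inlinedEquals(Character.valueOf('a'), Integer.valueOf(97)) // true, via equalsCharObject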
def _equals(x: Product, y: Any): Boolean = y match {
@@ -173,6 +175,50 @@ object ScalaRunTime {
case _ => false
}
+ // hashcode -----------------------------------------------------------
+
+ @inline def hash(x: Any): Int =
+ if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.hashFromNumber(x.asInstanceOf[java.lang.Number])
+ else x.hashCode
+
+ @inline def hash(dv: Double): Int = {
+ val iv = dv.toInt
+ if (iv == dv) return iv
+
+ val lv = dv.toLong
+ if (lv == dv) return lv.hashCode
+ else dv.hashCode
+ }
+ @inline def hash(fv: Float): Int = {
+ val iv = fv.toInt
+ if (iv == fv) return iv
+
+ val lv = fv.toLong
+ if (lv == fv) return lv.hashCode
+ else fv.hashCode
+ }
+ @inline def hash(lv: Long): Int = {
+ val iv = lv.toInt
+ if (iv == lv) iv else lv.hashCode
+ }
+ @inline def hash(x: Int): Int = x
+ @inline def hash(x: Short): Int = x.toInt
+ @inline def hash(x: Byte): Int = x.toInt
+ @inline def hash(x: Char): Int = x.toInt
+
+ @inline def hash(x: Number): Int = runtime.BoxesRunTime.hashFromNumber(x)
+ @inline def hash(x: java.lang.Long): Int = {
+ val iv = x.intValue
+ if (iv == x.longValue) iv else x.hashCode
+ }
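+
+ // Editorial sketch (not part of this patch): the overloads above keep equal
+ // numeric values hashing alike across widths, e.g.
+ //   hash(1) == 1; hash(1L) == 1; hash(1.0f) == 1; hash(1.0) == 1
+ //   hash(x: Long) falls back to x.hashCode once the value no longer fits in an Int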
+
+ /** A helper method for constructing case class equality methods,
+ * because existential types get in the way of a clean outcome and
+ * it's performing a series of Any/Any equals comparisons anyway.
+ * See ticket #2867 for specifics.
+ */
+ def sameElements(xs1: Seq[Any], xs2: Seq[Any]) = xs1 sameElements xs2
+
/** Given any Scala value, convert it to a String.
*
* The primary motivation for this method is to provide a means for
@@ -186,12 +232,26 @@ object ScalaRunTime {
* @return a string representation of <code>arg</code>
*
*/
- def stringOf(arg : Any): String = arg match {
- case null => "null"
- case arg: AnyRef if isArray(arg) =>
- val d: collection.IndexedSeq[Any] = WrappedArray.make(arg).deep
- d.toString
- case arg: WrappedArray[_] => arg.deep.toString
- case arg => arg.toString
+ def stringOf(arg: Any): String = {
+ def inner(arg: Any): String = arg match {
+ case null => "null"
+ // Node extends NodeSeq extends Seq[Node] strikes again
+ case x: Node => x toString
+ // Not to mention MetaData extends Iterable[MetaData]
+ case x: MetaData => x toString
+ case x: AnyRef if isArray(x) => WrappedArray make x map inner mkString ("Array(", ", ", ")")
+ case x: TraversableView[_, _] => x.toString
+ case x: Traversable[_] if !x.hasDefiniteSize => x.toString
+ case x: Traversable[_] =>
+ // Some subclasses of AbstractFile implement Iterable, then throw an
+ // exception if you call iterator. What a world.
+ // And they can't be infinite either.
+ if (x.getClass.getName startsWith "scala.tools.nsc.io") x.toString
+ else (x map inner) mkString (x.stringPrefix + "(", ", ", ")")
+ case x => x toString
+ }
+ val s = inner(arg)
+ val nl = if (s contains "\n") "\n" else ""
+ nl + s + "\n"
}
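+
+ // Editorial sketch (not part of this patch): the deep formatting yields, roughly,
+ //   stringOf(Array(1, 2, 3)) == "Array(1, 2, 3)\n"
+ //   stringOf(List(1, 2, 3)) == "List(1, 2, 3)\n"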
}
diff --git a/src/library/scala/testing/SUnit.scala b/src/library/scala/testing/SUnit.scala
index cf43bd1b06..d5d845cb98 100644
--- a/src/library/scala/testing/SUnit.scala
+++ b/src/library/scala/testing/SUnit.scala
@@ -12,6 +12,7 @@
package scala.testing
import scala.collection.mutable.ArrayBuffer
+import xml.{ Node, NodeSeq }
/**
* <p>
@@ -237,6 +238,25 @@ object SUnit {
assertTrue("(no message)", actual)
}
+ /** Temporary patchwork trying to nurse xml forward. */
+ def assertEqualsXML(msg: String, expected: NodeSeq, actual: NodeSeq) {
+ if (!expected.xml_==(actual))
+ fail(msg, expected, actual)
+ }
+ def assertEqualsXML(msg: String, expected: Seq[Node], actual: Seq[Node]) {
+ assertEqualsXML(msg, expected: NodeSeq, actual: NodeSeq)
+ }
+
+ def assertEqualsXML(expected: NodeSeq, actual: NodeSeq) {
+ assertEqualsXML("(no message)", expected, actual)
+ }
+
+ def assertSameElementsXML(actual: Seq[Node], expected: Seq[Node]) {
+ val res = (actual: NodeSeq) xml_sameElements expected
+
+ assert(res, "\nassertSameElementsXML:\n actual = %s\n expected = %s".format(actual, expected))
+ }
+
/** throws <code>AssertFailed</code> with given message <code>msg</code>.
*/
def fail(msg: String) {
diff --git a/src/library/scala/throws.scala b/src/library/scala/throws.scala
index 500db0a30a..b932ccc7ac 100644
--- a/src/library/scala/throws.scala
+++ b/src/library/scala/throws.scala
@@ -14,13 +14,13 @@ package scala
/** <p>
* Annotation for specifying the exceptions thrown by a method.
* For example:
- * </p><pre>
- * <b>class</b> Reader(fname: String) {
- * <b>private val</b> in =
- * <b>new</b> BufferedReader(<b>new</b> <a class="java/io/FileReader" href="" target="_top">FileReader</a>(fname))
- * @throws(classOf[<a class="java/io/IOException" href="" target="_top">IOException</a>])
- * <b>def</b> read() = in.read()
- * }</pre>
+ * {{{
+ * class Reader(fname: String) {
+ * private val in = new BufferedReader(new FileReader(fname))
+ * @throws(classOf[IOException])
+ * def read() = in.read()
+ * }
+ * }}}
*
* @author Nikolay Mihaylov
* @version 1.0, 19/05/2006
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index 73b5cf855a..b781e46be5 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -8,12 +8,18 @@
// $Id$
-
package scala.util
+import java.io.{ IOException, PrintWriter }
+
+/** Loads library.properties from the jar. */
+object Properties extends PropertiesTrait {
+ protected def propCategory = "library"
+ protected def pickJarBasedOn = classOf[ScalaObject]
+}
+
private[scala] trait PropertiesTrait
{
- import java.io.{ IOException, PrintWriter }
protected def propCategory: String // specializes the remainder of the values
protected def pickJarBasedOn: Class[_] // props file comes from jar containing this
@@ -21,7 +27,7 @@ private[scala] trait PropertiesTrait
protected val propFilename = "/" + propCategory + ".properties"
/** The loaded properties */
- protected lazy val props: java.util.Properties = {
+ protected lazy val scalaProps: java.util.Properties = {
val props = new java.util.Properties
val stream = pickJarBasedOn getResourceAsStream propFilename
if (stream ne null)
@@ -30,7 +36,6 @@ private[scala] trait PropertiesTrait
props
}
- protected def onull[T <: AnyRef](x: T) = if (x eq null) None else Some(x)
private def quietlyDispose(action: => Unit, disposal: => Unit) =
try { action }
finally {
@@ -38,51 +43,85 @@ private[scala] trait PropertiesTrait
catch { case _: IOException => }
}
- // for values based on system properties
- def sysprop(name: String): String = sysprop(name, "")
- def sysprop(name: String, default: String): String = System.getProperty(name, default)
- def syspropset(name: String, value: String) = System.setProperty(name, value)
+ def propIsSet(name: String) = System.getProperty(name) != null
+ def propIsSetTo(name: String, value: String) = propOrNull(name) == value
+ def propOrElse(name: String, alt: String) = System.getProperty(name, alt)
+ def propOrEmpty(name: String) = propOrElse(name, "")
+ def propOrNull(name: String) = propOrElse(name, null)
+ def propOrNone(name: String) = Option(propOrNull(name))
+ def propOrFalse(name: String) = propOrNone(name) exists (x => List("yes", "on", "true") contains x.toLowerCase)
+ def setProp(name: String, value: String) = System.setProperty(name, value)
+ def clearProp(name: String) = System.clearProperty(name)
+
+ def envOrElse(name: String, alt: String) = Option(System getenv name) getOrElse alt
// for values based on propFilename
- def prop(name: String): String = props.getProperty(name, "")
- def prop(name: String, default: String): String = props.getProperty(name, default)
+ def scalaPropOrElse(name: String, alt: String): String = scalaProps.getProperty(name, alt)
+ def scalaPropOrEmpty(name: String): String = scalaPropOrElse(name, "")
/** The version number of the jar this was loaded from plus "version " prefix,
* or "version (unknown)" if it cannot be determined.
*/
- val versionString = "version " + prop("version.number", "(unknown)")
- val copyrightString = prop("copyright.string", "(c) 2002-2010 LAMP/EPFL")
+ val versionString = "version " + scalaPropOrElse("version.number", "(unknown)")
+ val copyrightString = scalaPropOrElse("copyright.string", "(c) 2002-2010 LAMP/EPFL")
/** This is the encoding to use reading in source files, overridden with -encoding
* Note that it uses "prop" i.e. looks in the scala jar, not the system properties.
*/
- val sourceEncoding = prop("file.encoding", "UTF8")
+ def sourceEncoding = scalaPropOrElse("file.encoding", "UTF-8")
/** This is the default text encoding, overridden (unreliably) with
* JAVA_OPTS="-Dfile.encoding=Foo"
*/
- val encodingString = sysprop("file.encoding", "UTF8")
-
- val isWin = sysprop("os.name") startsWith "Windows"
- val isMac = sysprop("java.vendor") startsWith "Apple"
- val javaClassPath = sysprop("java.class.path")
- val javaHome = sysprop("java.home")
- val javaVmName = sysprop("java.vm.name")
- val javaVmVersion = sysprop("java.vm.version")
- val javaVmInfo = sysprop("java.vm.info")
- val javaVersion = sysprop("java.version")
- val tmpDir = sysprop("java.io.tmpdir")
- val userName = sysprop("user.name")
- val scalaHome = sysprop("scala.home", null) // XXX places do null checks...
+ def encodingString = propOrElse("file.encoding", "UTF-8")
+
+ /** The default end of line character.
+ */
+ def lineSeparator = propOrElse("line.separator", "\n")
+
+ /** Various well-known properties.
+ */
+ def javaClassPath = propOrEmpty("java.class.path")
+ def javaHome = propOrEmpty("java.home")
+ def javaVendor = propOrEmpty("java.vendor")
+ def javaVersion = propOrEmpty("java.version")
+ def javaVmInfo = propOrEmpty("java.vm.info")
+ def javaVmName = propOrEmpty("java.vm.name")
+ def javaVmVendor = propOrEmpty("java.vm.vendor")
+ def javaVmVersion = propOrEmpty("java.vm.version")
+ def osName = propOrEmpty("os.name")
+ def scalaHome = propOrEmpty("scala.home")
+ def tmpDir = propOrEmpty("java.io.tmpdir")
+ def userDir = propOrEmpty("user.dir")
+ def userHome = propOrEmpty("user.home")
+ def userName = propOrEmpty("user.name")
+
+ /** Some derived values.
+ */
+ def isWin = osName startsWith "Windows"
+ def isMac = javaVendor startsWith "Apple"
- // provide a main method so version info can be obtained by running this
- private val writer = new java.io.PrintWriter(Console.err, true)
def versionMsg = "Scala %s %s -- %s".format(propCategory, versionString, copyrightString)
- def main(args: Array[String]) { writer println versionMsg }
-}
+ def scalaCmd = if (isWin) "scala.bat" else "scala"
+ def scalacCmd = if (isWin) "scalac.bat" else "scalac"
-/** Loads library.properties from the jar. */
-object Properties extends PropertiesTrait {
- protected def propCategory = "library"
- protected def pickJarBasedOn = classOf[Application]
+ /** Can the java version be determined to be at least as high as the argument?
+ * Hard to properly future proof this but at the rate 1.7 is going we can leave
+ * the issue for our cyborg grandchildren to solve.
+ */
+ def isJavaAtLeast(version: String) = {
+ val okVersions = version match {
+ case "1.5" => List("1.5", "1.6", "1.7")
+ case "1.6" => List("1.6", "1.7")
+ case "1.7" => List("1.7")
+ case _ => Nil
+ }
+ okVersions exists (javaVersion startsWith _)
+ }
+
+ // provide a main method so version info can be obtained by running this
+ def main(args: Array[String]) {
+ val writer = new PrintWriter(Console.err, true)
+ writer println versionMsg
+ }
}
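
A small sketch exercising the new accessors; the printed values naturally depend on the running JVM:

    import scala.util.Properties

    object PropsDemo {
      def main(args: Array[String]) {
        println(Properties.versionMsg)                                  // e.g. "Scala library version ... -- (c) 2002-2010 LAMP/EPFL"
        println(Properties.javaVmName)                                  // JVM name from the system properties
        println(Properties.propOrElse("no.such.property", "fallback"))  // falls back when the property is unset
        println(Properties.envOrElse("HOME", "(HOME not set)"))         // environment lookup with a default
        println(Properties.isJavaAtLeast("1.6"))                        // true on a 1.6 or 1.7 JVM
      }
    }
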
diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala
index 3baa7e33e3..ffa248d638 100644
--- a/src/library/scala/util/Random.scala
+++ b/src/library/scala/util/Random.scala
@@ -107,19 +107,17 @@ class Random(val self: java.util.Random) {
*
* @since 2.8
*/
-object Random extends Random
-{
- import collection.Traversable
+object Random extends Random {
import collection.mutable.ArrayBuffer
import collection.generic.CanBuildFrom
/** Returns a new collection of the same type in a randomly chosen order.
*
- * @param coll the Traversable to shuffle
- * @return the shuffled Traversable
+ * @param coll the TraversableOnce to shuffle
+ * @return the shuffled TraversableOnce
*/
- def shuffle[T, CC[X] <: Traversable[X]](coll: CC[T])(implicit bf: CanBuildFrom[CC[T], T, CC[T]]): CC[T] = {
- val buf = new ArrayBuffer[T] ++= coll
+ def shuffle[T, CC[X] <: TraversableOnce[X]](xs: CC[T])(implicit bf: CanBuildFrom[CC[T], T, CC[T]]): CC[T] = {
+ val buf = new ArrayBuffer[T] ++= xs
def swap(i1: Int, i2: Int) {
val tmp = buf(i1)
@@ -132,6 +130,6 @@ object Random extends Random
swap(n - 1, k)
}
- bf(coll) ++= buf result
+ bf(xs) ++= buf result
}
}
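
The loosened signature still returns the collection type it was given; a brief sketch:

    import scala.util.Random

    object ShuffleDemo {
      def main(args: Array[String]) {
        println(Random.shuffle(List(1, 2, 3, 4, 5)))      // e.g. List(3, 1, 5, 2, 4)
        println(Random.shuffle("abcde".toList).mkString)  // some permutation of "abcde"
      }
    }
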
diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala
index 73228b53d5..4189f2d912 100644
--- a/src/library/scala/util/Sorting.scala
+++ b/src/library/scala/util/Sorting.scala
@@ -12,46 +12,43 @@
package scala.util
import scala.reflect.ClassManifest
-/** <p>
- * The Sorting object provides functions that can sort various kinds of
- * objects. You can provide a comparison function, or you can request a sort
- * of items that are viewable as <code>Ordered</code>. Some sorts that
- * operate directly on a subset of value types are also provided. These
- * implementations are derived from those in the Sun JDK.
- * </p>
- * <p>
- * Note that stability doesn't matter for value types, so use the quickSort
- * variants for those. <code>stableSort</code> is intended to be used with
- * objects when the prior ordering should be preserved, where possible.
- * </p>
- *
- * @author Ross Judson
- * @version 1.0
- */
+/** The Sorting object provides functions that can sort various kinds of
+ * objects. You can provide a comparison function, or you can request a sort
+ * of items that are viewable as <code>Ordered</code>. Some sorts that
+ * operate directly on a subset of value types are also provided. These
+ * implementations are derived from those in the Sun JDK.
+ *
+ * Note that stability doesn't matter for value types, so use the quickSort
+ * variants for those. <code>stableSort</code> is intended to be used with
+ * objects when the prior ordering should be preserved, where possible.
+ *
+ * @author Ross Judson
+ * @version 1.0
+ */
object Sorting {
/** Provides implicit access to sorting on arbitrary sequences of orderable
* items. This doesn't quite work the way that I want yet -- K should be
* bounded as viewable, but the compiler rejects that.
*/
- implicit def seq2RichSort[K <: Ordered[K] : ClassManifest](s: Seq[K]) = new RichSorting[K](s)
+ // implicit def seq2RichSort[K <: Ordered[K] : ClassManifest](s: Seq[K]) = new RichSorting[K](s)
/** Quickly sort an array of Doubles. */
- def quickSort(a: Array[Double]) = sort1(a, 0, a.length)
+ def quickSort(a: Array[Double]) { sort1(a, 0, a.length) }
- /** Quickly sort an array of items that are viewable as ordered. */
- def quickSort[K <% Ordered[K]](a: Array[K]) = sort1(a, 0, a.length)
+ /** Quickly sort an array of items with an implicit Ordering. */
+ def quickSort[K](a: Array[K])(implicit ord: Ordering[K]) { sort1(a, 0, a.length) }
/** Quickly sort an array of Ints. */
- def quickSort(a: Array[Int]) = sort1(a, 0, a.length)
+ def quickSort(a: Array[Int]) { sort1(a, 0, a.length) }
/** Quickly sort an array of Floats. */
- def quickSort(a: Array[Float]) = sort1(a, 0, a.length)
+ def quickSort(a: Array[Float]) { sort1(a, 0, a.length) }
/** Sort an array of K where K is Ordered, preserving the existing order
- where the values are equal. */
- def stableSort[K <% Ordered[K] : ClassManifest](a: Array[K]) {
- stableSort(a, 0, a.length-1, new Array[K](a.length), (a:K, b:K) => a < b)
+ * where the values are equal. */
+ def stableSort[K](a: Array[K])(implicit m: ClassManifest[K], ord: Ordering[K]) {
+ stableSort(a, 0, a.length-1, new Array[K](a.length), ord.lt _)
}
/** Sorts an array of <code>K</code> given an ordering function
@@ -77,8 +74,8 @@ object Sorting {
}
/** Sorts an arbitrary sequence of items that are viewable as ordered. */
- def stableSort[K <% Ordered[K] : ClassManifest](a: Seq[K]): Array[K] =
- stableSort(a, (a:K, b:K) => a < b)
+ def stableSort[K](a: Seq[K])(implicit m: ClassManifest[K], ord: Ordering[K]): Array[K] =
+ stableSort(a, ord.lt _)
/** Stably sorts a sequence of items given an extraction function that will
* return an ordered key from an item.
@@ -87,10 +84,11 @@ object Sorting {
* @param f the comparison function.
* @return the sorted sequence of items.
*/
- def stableSort[K : ClassManifest, M <% Ordered[M]](a: Seq[K], f: K => M): Array[K] =
- stableSort(a, (a: K, b: K) => f(a) < f(b))
+ def stableSort[K, M](a: Seq[K], f: K => M)(implicit m: ClassManifest[K], ord: Ordering[M]): Array[K] =
+ stableSort(a)(m, ord on f)
- private def sort1[K <% Ordered[K]](x: Array[K], off: Int, len: Int) {
+ private def sort1[K](x: Array[K], off: Int, len: Int)(implicit ord: Ordering[K]) {
+ import ord._
def swap(a: Int, b: Int) {
val t = x(a)
x(a) = x(b)
@@ -532,51 +530,6 @@ object Sorting {
}
}
}
-
- // for testing
- def main(args: Array[String]) {
- val tuples = Array(
- (1, "one"), (1, "un"), (3, "three"), (2, "deux"),
- (2, "two"), (0, "zero"), (3, "trois")
- )
- val integers = Array(
- 3, 4, 0, 4, 5, 0, 3, 3, 0
- )
- val doubles = Array(
- 3.4054752250314283E9,
- 4.9663151227666664E10,
-// 0.0/0.0 is interpreted as Nan
-// 0.0/0.0,
- 4.9663171987125E10,
- 5.785996973446602E9,
-// 0.0/0.0,
- 3.973064849653333E10,
- 3.724737288678125E10
-// 0.0/0.0
- )
- val floats = Array(
- 3.4054752250314283E9f,
- 4.9663151227666664E10f,
-// 0.0f/0.0f,
- 4.9663171987125E10f,
- 5.785996973446602E9f,
-// 0.0f/0.0f,
- 3.973064849653333E10f,
- 3.724737288678125E10f
-// 0.0f/0.0f
- )
- Sorting quickSort tuples
- println(tuples.toList)
-
- Sorting quickSort integers
- println(integers.toList)
-
- Sorting quickSort doubles
- println(doubles.toList)
-
- Sorting quickSort floats
- println(floats.toList)
- }
}
/** <p>
@@ -585,8 +538,7 @@ object Sorting {
* the items are ordered.
* </p>
*/
-class RichSorting[K <: Ordered[K] : ClassManifest](s: Seq[K]) {
-
+class RichSorting[K](s: Seq[K])(implicit m: ClassManifest[K], ord: Ordering[K]) {
/** Returns an array with a sorted copy of the RichSorting's sequence.
*/
def sort = Sorting.stableSort(s)
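
A short sketch of the Ordering-based signatures (the key-function variant goes through `ord on f`):

    import scala.util.Sorting

    object SortingDemo {
      def main(args: Array[String]) {
        val words = Array("pear", "apple", "orange")
        Sorting.quickSort(words)          // generic overload, uses the implicit Ordering[String]
        println(words.mkString(", "))     // apple, orange, pear

        // stableSort with a key extractor: equal keys keep their original order
        val people = Seq(("bob", 42), ("ann", 27), ("cay", 33))
        val byAge  = Sorting.stableSort(people, (p: (String, Int)) => p._2)
        println(byAge.toList)             // List((ann,27), (cay,33), (bob,42))
      }
    }
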
diff --git a/src/library/scala/util/automata/SubsetConstruction.scala b/src/library/scala/util/automata/SubsetConstruction.scala
index 0ebdd160e7..c8fba39f0e 100644
--- a/src/library/scala/util/automata/SubsetConstruction.scala
+++ b/src/library/scala/util/automata/SubsetConstruction.scala
@@ -57,7 +57,7 @@ class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
invIndexMap = invIndexMap.updated(ix, P)
ix += 1
- // make transitiion map
+ // make transition map
val Pdelta = new mutable.HashMap[T, BitSet]
delta.update(P, Pdelta)
diff --git a/src/library/scala/util/automata/WordBerrySethi.scala b/src/library/scala/util/automata/WordBerrySethi.scala
index d3238b6f67..b54fdc53f2 100644
--- a/src/library/scala/util/automata/WordBerrySethi.scala
+++ b/src/library/scala/util/automata/WordBerrySethi.scala
@@ -81,7 +81,7 @@ abstract class WordBerrySethi extends BaseBerrySethi {
this.labels += label
}
- // overriden in BindingBerrySethi
+ // overridden in BindingBerrySethi
protected def seenLabel(r: RegExp, label: lang._labelT): Int = {
pos += 1
seenLabel(r, pos, label)
diff --git a/src/library/scala/util/control/Breaks.scala b/src/library/scala/util/control/Breaks.scala
index 7ae4cba63a..1f06f04418 100644
--- a/src/library/scala/util/control/Breaks.scala
+++ b/src/library/scala/util/control/Breaks.scala
@@ -28,14 +28,14 @@ package scala.util.control
*/
class Breaks {
- private val breakException = new BreakException
+ private val breakException = new BreakControl
/** A block from which one can exit with a `break''. */
def breakable(op: => Unit) {
try {
op
} catch {
- case ex: BreakException =>
+ case ex: BreakControl =>
if (ex ne breakException) throw ex
}
}
@@ -61,5 +61,5 @@ class Breaks {
*/
object Breaks extends Breaks
-private class BreakException extends RuntimeException with ControlException
+private class BreakControl extends ControlThrowable
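
Usage is unchanged by the rename; the shared BreakControl instance is what `breakable` catches:

    import scala.util.control.Breaks._

    object BreaksDemo {
      def main(args: Array[String]) {
        breakable {
          for (i <- 1 to 10) {
            if (i > 3) break   // throws the shared BreakControl, caught by breakable
            println(i)         // prints 1, 2, 3
          }
        }
      }
    }
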
diff --git a/src/library/scala/util/control/ControlException.scala b/src/library/scala/util/control/ControlThrowable.scala
index 73f2b31e89..090bec4e98 100644
--- a/src/library/scala/util/control/ControlException.scala
+++ b/src/library/scala/util/control/ControlThrowable.scala
@@ -21,19 +21,19 @@ package scala.util.control
*
* <p>Instances of <code>Throwable</code> subclasses marked in
* this way should not normally be caught. Where catch-all behaviour is
- * required <code>ControlException</code>s should be propagated, for
+ * required <code>ControlThrowable</code>s should be propagated, for
* example,</p>
*
* <pre>
- * import scala.util.control.ControlException
+ * import scala.util.control.ControlThrowable
*
* try {
* // Body might throw arbitrarily
* } catch {
- * case ce : ControlException => throw ce // propagate
+ * case ce : ControlThrowable => throw ce // propagate
* case t : Exception => log(t) // log and suppress
* </pre>
*
* @author Miles Sabin
*/
-trait ControlException extends Throwable with NoStackTrace
+trait ControlThrowable extends Throwable with NoStackTrace
diff --git a/src/library/scala/util/logging/ConsoleLogger.scala b/src/library/scala/util/logging/ConsoleLogger.scala
index d4ef268e37..ec4148abc9 100644
--- a/src/library/scala/util/logging/ConsoleLogger.scala
+++ b/src/library/scala/util/logging/ConsoleLogger.scala
@@ -21,8 +21,6 @@ package scala.util.logging
trait ConsoleLogger extends Logged {
/** logs argument to Console using <code>Console.println</code>
- *
- * @param msg ...
*/
override def log(msg: String): Unit = Console.println(msg)
}
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 2ceef4563c..1384dfa47c 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -107,6 +107,32 @@ class Regex(regex: String, groupNames: String*) {
m.replaceAll(replacement)
}
+ /**
+ * Replaces all matches using a replacer function.
+ *
+ * @param target The string to match.
+ * @param replacer The function which maps a match to another string.
+ * @return The target string after replacements.
+ */
+ def replaceAllIn(target: java.lang.CharSequence, replacer: Match => String): String = {
+ val it = new Regex.MatchIterator(target, this, groupNames).replacementData
+ while (it.hasNext) {
+ val matchdata = it.next
+ it.replace(replacer(matchdata))
+ }
+ it.replaced
+ }
+
+ def replaceSomeIn(target: java.lang.CharSequence, replacer: Match => Option[String]): String = {
+ val it = new Regex.MatchIterator(target, this, groupNames).replacementData
+ while (it.hasNext) {
+ val matchdata = it.next
+ val replaceopt = replacer(matchdata)
+ if (replaceopt != None) it.replace(replaceopt.get)
+ }
+ it.replaced
+ }
+
/** Replaces the first match by a string.
*
* @param target The string to match
@@ -227,7 +253,7 @@ object Regex {
}
- /** A case class for a succesful match.
+ /** A case class for a successful match.
*/
class Match(val source: java.lang.CharSequence,
matcher: Matcher,
@@ -264,12 +290,17 @@ object Regex {
def unapply(m: Match): Some[String] = Some(m.matched)
}
+ /** An extractor object that yields groups in the match. */
+ object Groups {
+ def unapplySeq(m: Match): Option[Seq[String]] = if (m.groupCount > 0) Some(1 to m.groupCount map m.group) else None
+ }
+
/** A class to step through a sequence of regex matches
*/
class MatchIterator(val source: java.lang.CharSequence, val regex: Regex, val groupNames: Seq[String])
extends Iterator[String] with MatchData { self =>
- private val matcher = regex.pattern.matcher(source)
+ protected val matcher = regex.pattern.matcher(source)
private var nextSeen = false
/** Is there another match? */
@@ -307,6 +338,31 @@ object Regex {
def hasNext = self.hasNext
def next = { self.next; new Match(source, matcher, groupNames).force }
}
+
+ /** Convert to an iterator that yields MatchData elements instead of Strings and has replacement support */
+ private[matching] def replacementData = new Iterator[Match] with Replacement {
+ def matcher = self.matcher
+ def hasNext = self.hasNext
+ def next = { self.next; new Match(source, matcher, groupNames).force }
+ }
+ }
+
+ /**
+ * A trait able to build a string with replacements assuming it has a matcher.
+ * Meant to be mixed in with iterators.
+ */
+ private[matching] trait Replacement {
+ protected def matcher: Matcher
+
+ private var sb = new java.lang.StringBuffer
+
+ def replaced = {
+ val newsb = new java.lang.StringBuffer(sb)
+ matcher.appendTail(newsb)
+ newsb.toString
+ }
+
+ def replace(rs: String) = matcher.appendReplacement(sb, rs)
}
}
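
A sketch of the new functional replacement methods and the Groups extractor:

    import scala.util.matching.Regex

    object RegexDemo {
      def main(args: Array[String]) {
        val date = """(\d{4})-(\d{2})-(\d{2})""".r
        val text = "due 2010-03-01, shipped 2010-04-15"

        // replaceAllIn with a Match => String function
        println(date.replaceAllIn(text, m => m.group(3) + "/" + m.group(2)))

        // replaceSomeIn only rewrites matches for which the function yields Some(...)
        println(date.replaceSomeIn(text, m => if (m.group(2) == "03") Some("March") else None))

        // Groups pulls all capture groups out of a Match
        for (Regex.Groups(y, mo, d) <- date.findAllIn(text).matchData)
          println(y + "/" + mo + "/" + d)
      }
    }
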
diff --git a/src/library/scala/util/parsing/ast/Binders.scala b/src/library/scala/util/parsing/ast/Binders.scala
index b397e900da..7a9b8e5dcd 100644
--- a/src/library/scala/util/parsing/ast/Binders.scala
+++ b/src/library/scala/util/parsing/ast/Binders.scala
@@ -127,10 +127,8 @@ trait Binders extends AbstractSyntax with Mappable {
* (e.g. the variable name in a local variable declaration)
*
* @param b a new binder that is distinct from the existing binders in this scope,
- * and shares their conceptual scope
- * @pre canAddBinder(b)
- * @post binds(b)
- * @post getElementFor(b) eq b
+ * and shares their conceptual scope. `canAddBinder(b)` must hold.
+ * @return `binds(b)` and `getElementFor(b) eq b` will hold.
*/
def addBinder(b: binderType) { substitution += Pair(b, b) }
@@ -140,7 +138,7 @@ trait Binders extends AbstractSyntax with Mappable {
* linked to its `UnderBinder' (i.e., while parsing, BoundElements may be added to the Scope
* associated to the UnderBinder, but after that, no changes are allowed, except for substitution)?
*
- * @returns true if `b' had not been added yet
+ * @return true if `b' had not been added yet
*/
def canAddBinder(b: binderType): Boolean = !binds(b)
@@ -150,17 +148,15 @@ trait Binders extends AbstractSyntax with Mappable {
* a proxy for the element it is bound to by its binder, `substitute' may thus be thought of
* as replacing all the bound occurrences of the given binder `b' by their new value `value'.
*
- * @param b the binder whose bound occurrences should be given a new value
+ * @param b the binder whose bound occurrences should be given a new value. `binds(b)` must hold.
* @param value the new value for the bound occurrences of `b'
- * @pre binds(b)
- * @post getElementFor(b) eq value
+ * @return `getElementFor(b) eq value` will hold.
*/
def substitute(b: binderType, value: Element): Unit = substitution(b) = value
/** Returns the current value for the bound occurrences of `b'.
*
- * @param b the contained binder whose current value should be returned
- * @pre binds(b)
+ * @param b the contained binder whose current value should be returned. `binds(b)` must hold.
*/
def getElementFor(b: binderType): Element = substitution(b)
@@ -173,7 +169,7 @@ trait Binders extends AbstractSyntax with Mappable {
def allowForwardRef: Scope[binderType] = this // TODO
/** Return a nested scope -- binders entered into it won't be visible in this scope, but if this scope allows forward references,
- the binding in the returned scope also does, and thus the check that all variables are bound is deferred until this scope is left **/
+ * the binding in the returned scope also does, and thus the check that all variables are bound is deferred until this scope is left **/
def nested: Scope[binderType] = this // TODO
def onEnter {}
@@ -193,7 +189,7 @@ trait Binders extends AbstractSyntax with Mappable {
* A `BoundElement' is represented textually by its bound element, followed by its scope's `id'.
* For example: `x@1' represents the variable `x' that is bound in the scope with `id' `1'.
*
- * @invar scope.binds(el)
+ * @note `scope.binds(el)` holds before and after.
*/
case class BoundElement[boundElement <: NameElement](el: boundElement, scope: Scope[boundElement]) extends NameElement with Proxy with BindingSensitive {
/** Returns the element this `BoundElement' stands for.
@@ -300,7 +296,7 @@ trait Binders extends AbstractSyntax with Mappable {
*
* The name `sequence' comes from the fact that this method's type is equal to the type of monadic sequence.
*
- * @pre !orig.isEmpty implies orig.forall(ub => ub.scope eq orig(0).scope)
+ * @note `!orig.isEmpty` implies `orig.forall(ub => ub.scope eq orig(0).scope)`
*
*/
def sequence[bt <: NameElement, st <% Mappable[st]](orig: List[UnderBinder[bt, st]]): UnderBinder[bt, List[st]] =
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala
index 6fe35ad3b0..d270757189 100644
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ b/src/library/scala/util/parsing/combinator/Parsers.scala
@@ -48,35 +48,21 @@ import scala.annotation.tailrec
* of the input.
* </p>
*
- * @requires Elem the type of elements the provided parsers consume
- * (When consuming invidual characters, a parser is typically called a ``scanner'',
- * which produces ``tokens'' that are consumed by what is normally called a ``parser''.
- * Nonetheless, the same principles apply, regardless of the input type.)</p>
- *<p>
- * @provides Input = Reader[Elem]
- * The type of input the parsers in this component expect.</p>
- *<p>
- * @provides Parser[+T] extends (Input => ParseResult[T])
- * Essentially, a `Parser[T]' is a function from `Input' to `ParseResult[T]'.</p>
- *<p>
- * @provides ParseResult[+T] is like an `Option[T]', in the sense that it is either
- * `Success[T]', which consists of some result (:T) (and the rest of the input) or
- * `Failure[T]', which provides an error message (and the rest of the input).</p>
- *
* @author Martin Odersky, Iulian Dragos, Adriaan Moors
*/
trait Parsers {
- /** the type of input elements */
+ /** the type of input elements the provided parsers consume (When consuming individual characters, a parser is typically
+ * called a ``scanner'', which produces ``tokens'' that are consumed by what is normally called a ``parser''.
+ * Nonetheless, the same principles apply, regardless of the input type.) */
type Elem
- /** The parser input is an abstract reader of input elements */
+ /** The parser input is an abstract reader of input elements, i.e. the type of input the parsers in this component
+ * expect. */
type Input = Reader[Elem]
- /** A base class for parser results.
- * A result is either successful or not (failure may be fatal, i.e.,
- * an Error, or not, i.e., a Failure)
- * On success, provides a result of type <code>T</code>.
- */
+ /** A base class for parser results. A result is either successful or not (failure may be fatal, i.e., an Error, or
+ * not, i.e., a Failure). On success, provides a result of type `T` together with the rest of the input. */
sealed abstract class ParseResult[+T] {
/** Functional composition of ParseResults
*
@@ -302,7 +288,7 @@ trait Parsers {
* characters accepts.</p>
*
* @param q a parser that accepts if p consumes less characters.
- * @return a `Parser' that returns the result of the parser consuming the most characteres (out of `p' and `q').
+ * @return a `Parser' that returns the result of the parser consuming the most characters (out of `p' and `q').
*/
def ||| [U >: T](q: => Parser[U]): Parser[U] = new Parser[U] {
def apply(in: Input) = {
@@ -362,7 +348,7 @@ trait Parsers {
def ^? [U](f: PartialFunction[T, U]): Parser[U] = ^?(f, r => "Constructor function not defined at "+r)
- /** A parser combinator that parameterises a subsequent parser with the result of this one
+ /** A parser combinator that parameterizes a subsequent parser with the result of this one
*
*<p>
* Use this combinator when a parser depends on the result of a previous parser. `p' should be
@@ -592,13 +578,18 @@ trait Parsers {
def rep1[T](first: => Parser[T], p: => Parser[T]): Parser[List[T]] = Parser { in =>
val elems = new ListBuffer[T]
- @tailrec def applyp(in0: Input): ParseResult[List[T]] = p(in0) match {
- case Success(x, rest) => elems += x ; applyp(rest)
- case _ => Success(elems.toList, in0)
+ def continue(in: Input): ParseResult[List[T]] = {
+ val p0 = p // avoid repeatedly re-evaluating by-name parser
+ @tailrec def applyp(in0: Input): ParseResult[List[T]] = p0(in0) match {
+ case Success(x, rest) => elems += x ; applyp(rest)
+ case _ => Success(elems.toList, in0)
+ }
+
+ applyp(in)
}
first(in) match {
- case Success(x, rest) => elems += x ; applyp(rest)
+ case Success(x, rest) => elems += x ; continue(rest)
case ns: NoSuccess => ns
}
}
@@ -616,10 +607,11 @@ trait Parsers {
def repN[T](num: Int, p: => Parser[T]): Parser[List[T]] =
if (num == 0) success(Nil) else Parser { in =>
val elems = new ListBuffer[T]
+ val p0 = p // avoid repeatedly re-evaluating by-name parser
@tailrec def applyp(in0: Input): ParseResult[List[T]] =
if (elems.length == num) Success(elems.toList, in0)
- else p(in0) match {
+ else p0(in0) match {
case Success(x, rest) => elems += x ; applyp(rest)
case ns: NoSuccess => return ns
}
@@ -670,7 +662,7 @@ trait Parsers {
/** A parser generator that generalises the rep1sep generator so that `q', which parses the separator,
* produces a right-associative function that combines the elements it separates. Additionally,
- * The right-most (last) element and the left-most combinating function have to be supplied.
+ * The right-most (last) element and the left-most combining function have to be supplied.
*
* rep1sep(p: Parser[T], q) corresponds to chainr1(p, q ^^ cons, cons, Nil) (where val cons = (x: T, y: List[T]) => x :: y)
*
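
A minimal sketch (using RegexParsers, which is not part of this diff) showing the effect of caching the by-name parser: the block building the element parser now runs a small, fixed number of times per parse instead of once per repetition step:

    import scala.util.parsing.combinator.RegexParsers

    object RepDemo extends RegexParsers {
      var built = 0
      // counts how often the element parser is constructed
      def word: Parser[String] = { built += 1; """\w+""".r }
      def words = rep1(word)

      def main(args: Array[String]) {
        println(parseAll(words, "a b c"))  // e.g. [1.6] parsed: List(a, b, c)
        println(built)                     // stays at 2 (first + cached rest), not one per token
      }
    }
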
diff --git a/src/library/scala/util/parsing/combinator/lexical/Lexical.scala b/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
index fc3100053a..7a35bcad7d 100644
--- a/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
@@ -9,11 +9,12 @@
// $Id$
-package scala.util.parsing.combinator.lexical
-import scala.util.parsing.combinator._
+package scala.util.parsing
+package combinator
+package lexical
-import scala.util.parsing.syntax._
-import scala.util.parsing.input.CharArrayReader.EofCh
+import token._
+import input.CharArrayReader.EofCh
/** <p>
* This component complements the <code>Scanners</code> component with
diff --git a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala b/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
index 07f4975cf8..96e9a76572 100644
--- a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
@@ -9,11 +9,12 @@
// $Id$
-package scala.util.parsing.combinator.lexical
-import scala.util.parsing.combinator._
+package scala.util.parsing
+package combinator
+package lexical
-import scala.util.parsing.syntax._
-import scala.util.parsing.input._
+import token._
+import input._
/** <p>
* This component provides core functionality for lexical parsers.
@@ -23,13 +24,6 @@ import scala.util.parsing.input._
* {@see StdLexical}, for more functionality.
* </p>
*
- * @requires token a parser that produces a token (from a stream of characters)
- * @requires whitespace a unit-parser for white-space
- * @provides Scanner essentially a parser that parses a stream of characters
- * to produce `Token's, which are typically passed to a
- * syntactical parser (which operates on `Token's, not on
- * individual characters).
- *
* @author Martin Odersky, Adriaan Moors
*/
trait Scanners extends Parsers {
diff --git a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
index 1bb3e7c83f..bc53e3731d 100644
--- a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
@@ -9,11 +9,12 @@
// $Id$
-package scala.util.parsing.combinator.lexical
-import scala.util.parsing.combinator._
+package scala.util.parsing
+package combinator
+package lexical
-import scala.util.parsing.syntax._
-import scala.util.parsing.input.CharArrayReader.EofCh
+import token._
+import input.CharArrayReader.EofCh
import collection.mutable.HashSet
/** <p>
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
index 85c0592572..31fa06035c 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
@@ -8,12 +8,12 @@
// $Id$
+package scala.util.parsing
+package combinator
+package syntactical
-package scala.util.parsing.combinator.syntactical
-import scala.util.parsing.combinator._
-
-import scala.util.parsing.syntax._
-import scala.util.parsing.combinator.lexical.StdLexical
+import token._
+import lexical.StdLexical
/** This component provides primitive parsers for the standard tokens defined in `StdTokens'.
*
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
index 544c7f08d5..5b62280b78 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
@@ -9,11 +9,12 @@
// $Id$
-package scala.util.parsing.combinator.syntactical
-import scala.util.parsing.combinator._
+package scala.util.parsing
+package combinator
+package syntactical
-import scala.util.parsing.syntax._
-import scala.collection.mutable.HashMap
+import token._
+import collection.mutable.HashMap
/** This component provides primitive parsers for the standard tokens defined in `StdTokens'.
*
diff --git a/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
index 01557c32a7..ae4120b402 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
@@ -8,23 +8,17 @@
// $Id$
+package scala.util.parsing
+package combinator
+package syntactical
-package scala.util.parsing.combinator.syntactical
-import scala.util.parsing.combinator._
-
-/** <p>
- * This is the core component for token-based parsers.
- * </p>
- * <p>
- * @requires lexical a component providing the tokens consumed by the
- * parsers in this component.
- * </p>
+/** This is the core component for token-based parsers.
*
* @author Martin Odersky, Adriaan Moors
*/
trait TokenParsers extends Parsers {
/** Tokens is the abstract type of the `Token's consumed by the parsers in this component*/
- type Tokens <: scala.util.parsing.syntax.Tokens
+ type Tokens <: token.Tokens
/** lexical is the component responsible for consuming some basic kind of
* input (usually character-based) and turning it into the tokens
diff --git a/src/library/scala/util/parsing/syntax/StdTokens.scala b/src/library/scala/util/parsing/combinator/token/StdTokens.scala
index 2321082b92..ea565235d1 100644
--- a/src/library/scala/util/parsing/syntax/StdTokens.scala
+++ b/src/library/scala/util/parsing/combinator/token/StdTokens.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.util.parsing.syntax
+package scala.util.parsing
+package combinator
+package token
/** This component provides the standard `Token's for a simple, Scala-like language.
*
diff --git a/src/library/scala/util/parsing/syntax/Tokens.scala b/src/library/scala/util/parsing/combinator/token/Tokens.scala
index fdc6385b6e..b7a568efea 100644
--- a/src/library/scala/util/parsing/syntax/Tokens.scala
+++ b/src/library/scala/util/parsing/combinator/token/Tokens.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.util.parsing.syntax
+package scala.util.parsing
+package combinator
+package token
/** This component provides the notion of `Token', the unit of information that is passed from lexical
* parsers in the `Lexical' component to the parsers in the `TokenParsers' component.
diff --git a/src/library/scala/util/parsing/input/Position.scala b/src/library/scala/util/parsing/input/Position.scala
index 6922bec19c..482610ca28 100644
--- a/src/library/scala/util/parsing/input/Position.scala
+++ b/src/library/scala/util/parsing/input/Position.scala
@@ -53,7 +53,7 @@ trait Position {
*<pre> List(this, is, a, line, from, the, document)
* ^</pre>
*/
- def longString = lineContents+"\n"+(" " * (column - 1))+"^"
+ def longString = lineContents+"\n"+lineContents.take(column-1).map{x => if (x == '\t') x else ' ' } + "^"
/** Compare this position to another, by first comparing their line numbers,
* and then -- if necessary -- using the columns to break a tie.
diff --git a/src/library/scala/util/parsing/syntax/package.scala b/src/library/scala/util/parsing/syntax/package.scala
new file mode 100644
index 0000000000..9dc909ca60
--- /dev/null
+++ b/src/library/scala/util/parsing/syntax/package.scala
@@ -0,0 +1,19 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.util.parsing
+
+import scala.util.parsing.combinator.token
+
+/** If deprecating the whole package worked, that's what would best
+ * be done, but it doesn't (yet) so it isn't.
+ */
+package object syntax {
+ @deprecated("Moved to scala.util.parsing.combinator.token") type Tokens = token.Tokens
+ @deprecated("Moved to scala.util.parsing.combinator.token") type StdTokens = token.StdTokens
+}
diff --git a/src/library/scala/xml/Atom.scala b/src/library/scala/xml/Atom.scala
index 129a0803d2..7c66995573 100644
--- a/src/library/scala/xml/Atom.scala
+++ b/src/library/scala/xml/Atom.scala
@@ -8,9 +8,7 @@
// $Id$
-
package scala.xml
-import collection.mutable.StringBuilder
/** The class <code>Atom</code> provides an XML node for text (PCDATA).
* It is used in both non-bound and bound XML representations.
@@ -24,17 +22,21 @@ class Atom[+A](val data: A) extends SpecialNode
if (data == null)
throw new IllegalArgumentException("cannot construct Atom(null)")
+ override def basisForHashCode: Seq[Any] = Seq(data)
+ override def strict_==(other: Equality) = other match {
+ case x: Atom[_] => data == x.data
+ case _ => false
+ }
+ override def canEqual(other: Any) = other match {
+ case _: Atom[_] => true
+ case _ => false
+ }
+
final override def doCollectNamespaces = false
final override def doTransform = false
def label = "#PCDATA"
- override def equals(x: Any) = x match {
- case s:Atom[_] => data == s.data
- case _ => false
- }
- override def hashCode() = data.hashCode()
-
/** Returns text, with some characters escaped according to the XML
* specification.
*
diff --git a/src/library/scala/xml/Attribute.scala b/src/library/scala/xml/Attribute.scala
index 8ff9fb2ed7..3259526d98 100644
--- a/src/library/scala/xml/Attribute.scala
+++ b/src/library/scala/xml/Attribute.scala
@@ -10,9 +10,6 @@
package scala.xml
-import collection.Seq
-import collection.mutable.StringBuilder
-
/** Attribute defines the interface shared by both
* PrefixedAttribute and UnprefixedAttribute
*/
@@ -45,6 +42,7 @@ object Attribute {
abstract trait Attribute extends MetaData
{
+ def pre: String // will be null if unprefixed
val key: String
val value: Seq[Node]
val next: MetaData
@@ -52,13 +50,43 @@ abstract trait Attribute extends MetaData
def apply(key: String): Seq[Node]
def apply(namespace: String, scope: NamespaceBinding, key: String): Seq[Node]
def copy(next: MetaData): Attribute
- def remove(key: String): MetaData
- def remove(namespace: String, scope: NamespaceBinding, key: String): MetaData
- def isPrefixed: Boolean
+ def remove(key: String) =
+ if (!isPrefixed && this.key == key) next
+ else copy(next remove key)
+
+ def remove(namespace: String, scope: NamespaceBinding, key: String) =
+ if (isPrefixed && this.key == key && (scope getURI pre) == namespace) next
+ else next.remove(namespace, scope, key)
+
+ def isPrefixed: Boolean = pre != null
def getNamespace(owner: Node): String
- def wellformed(scope: NamespaceBinding): Boolean
+ def wellformed(scope: NamespaceBinding): Boolean = {
+ val arg = if (isPrefixed) scope getURI pre else null
+ (next(arg, scope, key) == null) && (next wellformed scope)
+ }
- def equals1(m: MetaData): Boolean
- def toString1(sb: StringBuilder): Unit
+ override def canEqual(other: Any) = other match {
+ case _: Attribute => true
+ case _ => false
+ }
+ override def strict_==(other: Equality) = other match {
+ case x: Attribute => (pre == x.pre) && (key == x.key) && (value sameElements x.value)
+ case _ => false
+ }
+ override def basisForHashCode = List(pre, key, value)
+
+ /** Appends string representation of only this attribute to stringbuffer.
+ */
+ def toString1(sb: StringBuilder) {
+ if (value == null)
+ return
+ if (isPrefixed)
+ sb append pre append ':'
+
+ sb append key append '='
+ val sb2 = new StringBuilder()
+ Utility.sequenceToXML(value, TopScope, sb2, true)
+ Utility.appendQuoted(sb2.toString(), sb)
+ }
}
diff --git a/src/library/scala/xml/Comment.scala b/src/library/scala/xml/Comment.scala
index 4e8cff8d75..9608748601 100644
--- a/src/library/scala/xml/Comment.scala
+++ b/src/library/scala/xml/Comment.scala
@@ -10,7 +10,7 @@
package scala.xml
-import collection.mutable.StringBuilder
+
/** The class <code>Comment</code> implements an XML node for comments.
*
diff --git a/src/library/scala/xml/Document.scala b/src/library/scala/xml/Document.scala
index 3ac50b80b7..6c73252a37 100644
--- a/src/library/scala/xml/Document.scala
+++ b/src/library/scala/xml/Document.scala
@@ -87,4 +87,8 @@ class Document extends NodeSeq with pull.XMLEvent {
def theSeq: Seq[Node] = this.docElem
+ override def canEqual(other: Any) = other match {
+ case _: Document => true
+ case _ => false
+ }
}
diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala
index 18b513527c..9c58177417 100644
--- a/src/library/scala/xml/Elem.scala
+++ b/src/library/scala/xml/Elem.scala
@@ -8,11 +8,8 @@
// $Id$
-
package scala.xml
-import collection.Seq
-
/** This singleton object contains the apply and unapplySeq methods for convenient construction and
* deconstruction. It is possible to deconstruct any Node instance (that is not a SpecialNode or
* a Group) using the syntax
@@ -26,8 +23,10 @@ object Elem
def apply(prefix: String,label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*) =
new Elem(prefix,label,attributes,scope,child:_*)
- def unapplySeq(n:Node) = if (n.isInstanceOf[SpecialNode] || n.isInstanceOf[Group]) None else
- Some((n.prefix, n.label, n.attributes, n.scope, n.child))
+ def unapplySeq(n: Node) = n match {
+ case _: SpecialNode | _: Group => None
+ case _ => Some((n.prefix, n.label, n.attributes, n.scope, n.child))
+ }
}
/** The case class <code>Elem</code> extends the <code>Node</code> class,
@@ -54,18 +53,17 @@ extends Node
final override def doCollectNamespaces = true
final override def doTransform = true
- if ((null != prefix) && 0 == prefix.length())
+ if (prefix == "")
throw new IllegalArgumentException("prefix of zero length, use null instead")
- if (null == scope)
- throw new IllegalArgumentException("scope is null, try xml.TopScope for empty scope")
+ if (scope == null)
+ throw new IllegalArgumentException("scope is null, use xml.TopScope for empty scope")
//@todo: copy the children,
// setting namespace scope if necessary
// cleaning adjacent text nodes if necessary
- override def hashCode(): Int =
- Utility.hashCode(prefix, label, attributes.hashCode(), scope.hashCode(), child)
+ override def basisForHashCode: Seq[Any] = prefix :: label :: attributes :: child.toList
/** Returns a new element with updated attributes, resolving namespace uris from this element's scope.
* See MetaData.update for details.
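
The documented deconstruction syntax keeps working with the rewritten extractor; a quick sketch:

    import scala.xml._

    object ElemDemo {
      def main(args: Array[String]) {
        <a href="http://example.org">link</a> match {
          case Elem(prefix, label, attribs, scope, child @ _*) =>
            println(label + " has " + child.length + " child node(s)")  // a has 1 child node(s)
          case _ =>
            println("not an Elem")
        }
      }
    }
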
diff --git a/src/library/scala/xml/EntityRef.scala b/src/library/scala/xml/EntityRef.scala
index 0806b8fa68..fbc1f351cf 100644
--- a/src/library/scala/xml/EntityRef.scala
+++ b/src/library/scala/xml/EntityRef.scala
@@ -10,7 +10,7 @@
package scala.xml
-import collection.mutable.StringBuilder
+
/** The class <code>EntityRef</code> implements an XML node for entity
diff --git a/src/library/scala/xml/Equality.scala b/src/library/scala/xml/Equality.scala
new file mode 100644
index 0000000000..d09ae10b2d
--- /dev/null
+++ b/src/library/scala/xml/Equality.scala
@@ -0,0 +1,115 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.xml
+
+/** In an attempt to contain the damage being inflicted on
+ * consistency by the ad hoc equals methods spread around
+ * xml, the logic is centralized and all the xml classes
+ * go through the xml.Equality trait. There are two forms
+ * of xml comparison.
+ *
+ * 1) def strict_==(other: xml.Equality)
+ *
+ * This one tries to honor the little things like symmetry
+ * and hashCode contracts. The equals method routes all
+ * comparisons through this.
+ *
+ * 2) xml_==(other: Any)
+ *
+ * This one picks up where strict_== leaves off. It might
+ * declare any two things equal.
+ *
+ * As things stood, the logic not only made a mockery of
+ * the collections equals contract, but also laid waste to
+ * that of case classes.
+ *
+ * Among the obstacles to sanity are/were:
+ *
+ * Node extends NodeSeq extends Seq[Node]
+ * MetaData extends Iterable[MetaData]
+ * The hacky "Group" xml node which throws exceptions
+ * with wild abandon, so don't get too close
+ * Rampant asymmetry and impossible hashCodes
+ * Most classes claiming to be equal to "String" if
+ * some specific stringification of it was the same.
+ * String was never going to return the favor.
+ */
+
+object Equality {
+ def asRef(x: Any): AnyRef = x.asInstanceOf[AnyRef]
+
+ /** Note - these functions assume strict equality has already failed.
+ */
+ def compareBlithely(x1: AnyRef, x2: String): Boolean = x1 match {
+ case x: Atom[_] => x.data == x2
+ case x: NodeSeq => x.text == x2
+ case _ => false
+ }
+ def compareBlithely(x1: AnyRef, x2: Node): Boolean = x1 match {
+ case x: NodeSeq if x.length == 1 => x2 == x(0)
+ case _ => false
+ }
+ def compareBlithely(x1: AnyRef, x2: AnyRef): Boolean = {
+ if (x1 == null || x2 == null)
+ return (x1 eq x2)
+
+ x2 match {
+ case s: String => compareBlithely(x1, s)
+ case n: Node => compareBlithely(x1, n)
+ case _ => false
+ }
+ }
+}
+import Equality._
+
+private[xml]
+trait Equality extends scala.Equals {
+ def basisForHashCode: Seq[Any]
+ def strict_==(other: Equality): Boolean
+ def strict_!=(other: Equality) = !strict_==(other)
+
+ private def hashOf(x: Any) = if (x == null) 1 else x.hashCode()
+
+ /** We insist we're only equal to other xml.Equality implementors,
+ * which heads off a lot of inconsistency up front.
+ */
+ override def canEqual(other: Any): Boolean = other match {
+ case x: Equality => true
+ case _ => false
+ }
+
+ /** It'd be nice to make these final, but there are probably
+ * people out there subclassing the XML types, especially when
+ * it comes to equals. However WE at least can pretend they
+ * are final since clearly individual classes cannot be trusted
+ * to maintain a semblance of order.
+ */
+ override def hashCode() = basisForHashCode match {
+ case Nil => 0
+ case x :: xs => hashOf(x) * 41 + (xs map hashOf).foldLeft(0)(_ * 7 + _)
+ }
+ override def equals(other: Any) = doComparison(other, false)
+ final def xml_==(other: Any) = doComparison(other, true)
+ final def xml_!=(other: Any) = !xml_==(other)
+
+ /** The "blithe" parameter expresses the caller's unconcerned attitude
+ * regarding the usual constraints on equals. The method is thereby
+ * given carte blanche to declare any two things equal.
+ */
+ private def doComparison(other: Any, blithe: Boolean) = {
+ val strictlyEqual = other match {
+ case x: AnyRef if this eq x => true
+ case x: Equality => (x canEqual this) && (this strict_== x)
+ case _ => false
+ }
+
+ strictlyEqual || (blithe && compareBlithely(this, asRef(other)))
+ }
+}
+
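
A short illustration of the two comparison forms from user code (xml_== is reachable through the public XML classes that mix in the trait, as the SUnit changes above already rely on):

    import scala.xml._

    object EqualityDemo {
      def main(args: Array[String]) {
        val node: Node  = <a>text</a>
        val other: Node = <a>text</a>
        println(node == other)       // true: equals is routed through strict_==
        println(node xml_== "text")  // true: the blithe comparison accepts node.text == "text"
        println(node xml_== other)   // true as well; xml_== subsumes strict equality
      }
    }
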
diff --git a/src/library/scala/xml/Group.scala b/src/library/scala/xml/Group.scala
index 91ddba6ce6..8b714d2813 100644
--- a/src/library/scala/xml/Group.scala
+++ b/src/library/scala/xml/Group.scala
@@ -8,9 +8,7 @@
// $Id$
-
package scala.xml
-import collection.Seq
/** A hack to group XML nodes in one node for output.
*
@@ -18,49 +16,27 @@ import collection.Seq
* @version 1.0
*/
@serializable
-case class Group(val nodes: Seq[Node]) extends Node {
- // final override def doTransform = false
+final case class Group(val nodes: Seq[Node]) extends Node {
override def theSeq = nodes
- /** XXX this is ridiculous, we can't do equality like this. */
- override def equals(x: Any) = x match {
- case z:Group => (length == z.length) && sameElements(z)
- case z:Node => (length == 1) && z == apply(0)
- case z:Seq[_] => sameElements(z)
- case z:String => text == z
- case _ => false
+ override def canEqual(other: Any) = other match {
+ case x: Group => true
+ case _ => false
}
- /* As if there were a hashCode which could back up the above implementation! */
- override def hashCode = nodes.hashCode
-
- /**
- * @throws Predef.UnsupportedOperationException (always)
- */
- final def label =
- throw new UnsupportedOperationException("class Group does not support method 'label'")
-
- /**
- * @throws Predef.UnsupportedOperationException (always)
- */
- final override def attributes =
- throw new UnsupportedOperationException("class Group does not support method 'attributes'")
-
- /**
- * @throws Predef.UnsupportedOperationException (always)
- */
- final override def namespace =
- throw new UnsupportedOperationException("class Group does not support method 'namespace'")
+ override def strict_==(other: Equality) = other match {
+ case Group(xs) => nodes sameElements xs
+ case _ => false
+ }
+ override def basisForHashCode = nodes
- /**
- * @throws Predef.UnsupportedOperationException (always)
+ /** Since Group is very much a hack it throws an exception if you
+ * try to do anything with it.
*/
- final override def child =
- throw new UnsupportedOperationException("class Group does not support method 'child'")
+ private def fail(msg: String) = throw new UnsupportedOperationException("class Group does not support method '%s'" format msg)
- /**
- * @throws Predef.UnsupportedOperationException (always)
- */
- def buildString(sb: StringBuilder) =
- throw new UnsupportedOperationException(
- "class Group does not support method toString(StringBuilder)")
+ def label = fail("label")
+ override def attributes = fail("attributes")
+ override def namespace = fail("namespace")
+ override def child = fail("child")
+ def buildString(sb: StringBuilder) = fail("toString(StringBuilder)")
}
diff --git a/src/library/scala/xml/MetaData.scala b/src/library/scala/xml/MetaData.scala
index 01b1cbc1d9..744b662fb8 100644
--- a/src/library/scala/xml/MetaData.scala
+++ b/src/library/scala/xml/MetaData.scala
@@ -8,14 +8,10 @@
// $Id$
-
package scala.xml
import Utility.sbToString
import annotation.tailrec
-import collection.immutable.List
-import collection.{Seq, Iterator, Iterable}
-import collection.mutable.StringBuilder
/**
@@ -77,7 +73,7 @@ object MetaData {
* @author Burak Emir <bqe@google.com>
*/
@serializable
-abstract class MetaData extends Iterable[MetaData]
+abstract class MetaData extends Iterable[MetaData] with Equality
{
/** Updates this MetaData with the MetaData given as argument. All attributes that occur in updates
* are part of the resulting MetaData. If an attribute occurs in both this instance and
@@ -118,13 +114,6 @@ abstract class MetaData extends Iterable[MetaData]
*/
def apply(namespace_uri:String, scp:NamespaceBinding, k:String): Seq[Node]
- /**
- * @param m ...
- * @return <code>true</code> iff ...
- */
- def containedIn1(m: MetaData): Boolean =
- m != null && (m.equals1(this) || containedIn1(m.next))
-
/** returns a copy of this MetaData item with next field set to argument.
*
* @param next ...
@@ -143,22 +132,20 @@ abstract class MetaData extends Iterable[MetaData]
def isPrefixed: Boolean
- /** deep equals method - XXX */
- override def equals(that: Any) = that match {
- case m: MetaData =>
- (this.length == m.length) &&
- (this.hashCode == m.hashCode) &&
- (this forall (_ containedIn1 m))
+ override def canEqual(other: Any) = other match {
+ case _: MetaData => true
+ case _ => false
+ }
+ override def strict_==(other: Equality) = other match {
+ case m: MetaData => this.toSet == m.toSet
case _ => false
}
+ def basisForHashCode: Seq[Any] = List(this.toSet)
/** Returns an iterator on attributes */
- def iterator: Iterator[MetaData] = Iterator.iterate(this)(_.next) takeWhile (_ != Null)
+ def iterator: Iterator[MetaData] = Iterator.single(this) ++ next.iterator
override def size: Int = 1 + iterator.length
- /** shallow equals method */
- def equals1(that: MetaData): Boolean
-
/** filters this sequence of meta data */
override def filter(f: MetaData => Boolean): MetaData =
if (f(this)) copy(next filter f)
@@ -170,8 +157,18 @@ abstract class MetaData extends Iterable[MetaData]
/** returns value of this MetaData item */
def value: Seq[Node]
- /** maps this sequence of meta data */
- def map(f: MetaData => Text): List[Text] = (iterator map f).toList
+ /** Returns a String containing "prefix:key" if the first key is
+ * prefixed, and "key" otherwise.
+ */
+ def prefixedKey = this match {
+ case x: Attribute if x.isPrefixed => x.pre + ":" + key
+ case _ => key
+ }
+
+ /** Returns a Map containing the attributes stored as key/value pairs.
+ */
+ def asAttrMap: Map[String, String] =
+ iterator map (x => (x.prefixedKey, x.value.text)) toMap
/** returns Null or the next MetaData item */
def next: MetaData
@@ -198,8 +195,6 @@ abstract class MetaData extends Iterable[MetaData]
final def get(uri: String, scope: NamespaceBinding, key: String): Option[Seq[Node]] =
Option(apply(uri, scope, key))
- override def hashCode(): Int
-
def toString1(): String = sbToString(toString1)
// appends string representations of single attribute to StringBuilder
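
A quick sketch of the new attribute-map view, including a prefixed attribute so prefixedKey comes into play:

    import scala.xml._

    object AttrMapDemo {
      def main(args: Array[String]) {
        val img = <img xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="pic.png" alt="A picture"/>
        // prefixedKey keeps the prefix, so the map keys stay unambiguous
        println(img.attributes.asAttrMap)  // e.g. Map(xlink:href -> pic.png, alt -> A picture)
      }
    }
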
diff --git a/src/library/scala/xml/NamespaceBinding.scala b/src/library/scala/xml/NamespaceBinding.scala
index 7381f0129b..47ca8fed87 100644
--- a/src/library/scala/xml/NamespaceBinding.scala
+++ b/src/library/scala/xml/NamespaceBinding.scala
@@ -12,7 +12,7 @@
package scala.xml
import Utility.sbToString
-import collection.mutable.StringBuilder
+
/** The class <code>NamespaceBinding</code> represents namespace bindings
* and scopes. The binding for the default namespace is treated as a null
@@ -23,7 +23,7 @@ import collection.mutable.StringBuilder
* @version 1.0
*/
@SerialVersionUID(0 - 2518644165573446725L)
-case class NamespaceBinding(prefix: String, uri: String, parent: NamespaceBinding) extends AnyRef
+case class NamespaceBinding(prefix: String, uri: String, parent: NamespaceBinding) extends AnyRef with Equality
{
if (prefix == "")
throw new IllegalArgumentException("zero length prefix not allowed")
@@ -41,6 +41,15 @@ case class NamespaceBinding(prefix: String, uri: String, parent: NamespaceBindin
if (_uri == uri) prefix else parent getPrefix _uri
override def toString(): String = sbToString(buildString(_, TopScope))
+ override def canEqual(other: Any) = other match {
+ case _: NamespaceBinding => true
+ case _ => false
+ }
+ override def strict_==(other: Equality) = other match {
+ case x: NamespaceBinding => (prefix == x.prefix) && (uri == x.uri) && (parent == x.parent)
+ case _ => false
+ }
+ def basisForHashCode: Seq[Any] = List(prefix, uri, parent)
def buildString(stop: NamespaceBinding): String = sbToString(buildString(_, stop))
def buildString(sb: StringBuilder, stop: NamespaceBinding): Unit = {
diff --git a/src/library/scala/xml/Node.scala b/src/library/scala/xml/Node.scala
index f206140fd4..5117bb9282 100644
--- a/src/library/scala/xml/Node.scala
+++ b/src/library/scala/xml/Node.scala
@@ -8,13 +8,8 @@
// $Id$
-
package scala.xml
-import collection.Seq
-import collection.immutable.{List, Nil}
-import collection.mutable.StringBuilder
-
/**
* This object provides methods ...
*
@@ -22,7 +17,6 @@ import collection.mutable.StringBuilder
* @version 1.0
*/
object Node {
-
/** the constant empty attribute sequence */
final def NoAttributes: MetaData = Null
@@ -30,7 +24,6 @@ object Node {
val EmptyNamespace = ""
def unapplySeq(n: Node) = Some((n.label, n.attributes, n.child))
-
}
/**
@@ -116,6 +109,10 @@ abstract class Node extends NodeSeq {
*/
def child: Seq[Node]
+ /** Children which do not stringify to "" (needed for equality)
+ */
+ def nonEmptyChildren: Seq[Node] = child filterNot (_.toString == "")
+
/**
* Descendant axis (all descendants of this node, not including node itself)
* includes all text nodes, element nodes, comments and processing instructions.
@@ -129,41 +126,24 @@ abstract class Node extends NodeSeq {
*/
def descendant_or_self: List[Node] = this :: descendant
- /**
- * Returns true if x is structurally equal to this node. Compares prefix,
- * label, attributes and children.
- *
- * @param x ...
- * @return <code>true</code> if ..
- */
- override def equals(x: Any): Boolean = x match {
- case g: Group => false
- case that: Node =>
- this.prefix == that.prefix &&
- this.label == that.label &&
- this.attributes == that.attributes &&
- this.scope == that.scope &&
- equalChildren(that)
+ override def canEqual(other: Any) = other match {
+ case x: Group => false
+ case x: Node => true
case _ => false
}
-
- // children comparison has to be done carefully - see bug #1773.
- // It would conceivably be a better idea for a scala block which
- // generates the empty string not to generate a child rather than
- // our having to filter it later, but that approach would be more
- // delicate to implement.
- private def equalChildren(that: Node) = {
- def noEmpties(xs: Seq[Node]) = xs filter (_.toString() != "")
- noEmpties(this.child) sameElements noEmpties(that.child)
+ override def basisForHashCode: Seq[Any] = prefix :: label :: attributes :: nonEmptyChildren.toList
+ override def strict_==(other: Equality) = other match {
+ case _: Group => false
+ case x: Node =>
+ (prefix == x.prefix) &&
+ (label == x.label) &&
+ (attributes == x.attributes) &&
+ // (scope == x.scope) // note - original code didn't compare scopes so I left it as is.
+ (nonEmptyChildren sameElements x.nonEmptyChildren)
+ case _ =>
+ false
}
- /** <p>
- * Returns a hashcode.
- * </p>
- */
- override def hashCode(): Int =
- Utility.hashCode(prefix, label, attributes.hashCode(), scope.hashCode(), child)
-
// implementations of NodeSeq methods
/**
@@ -213,9 +193,10 @@ abstract class Node extends NodeSeq {
* Martin to Burak: to do: if you make this method abstract, the compiler will now
* complain if there's no implementation in a subclass. Is this what we want? Note that
* this would break doc/DocGenator and doc/ModelToXML, with an error message like:
-doc\DocGenerator.scala:1219: error: object creation impossible, since there is a deferred declaration of method text in class Node of type => String which is not implemented in a subclass
- new SpecialNode {
- ^
- */
+ * {{{
+ * doc\DocGenerator.scala:1219: error: object creation impossible, since there is a deferred declaration of method text in class Node of type => String which is not implemented in a subclass
+ * new SpecialNode {
+ * ^
+ * }}} */
override def text: String = super.text
}
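
Node (via NodeSeq) now delegates equals and hashCode to the Equality trait introduced elsewhere in this changeset. The trait itself is not part of this hunk, so the following is only a sketch of the shape that the new canEqual / strict_== / basisForHashCode members imply, not the actual trait:

    trait Equality extends scala.Equals {
      def basisForHashCode: Seq[Any]
      def strict_==(other: Equality): Boolean
      def strict_!=(other: Equality) = !strict_==(other)

      // derived once for every XML type that supplies the three members above
      override def hashCode = basisForHashCode.hashCode
      override def equals(other: Any) = other match {
        case x: Equality => (x canEqual this) && (this strict_== x)
        case _           => false
      }
    }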
diff --git a/src/library/scala/xml/NodeBuffer.scala b/src/library/scala/xml/NodeBuffer.scala
index 49efe0a5ca..2cf999e8a4 100644
--- a/src/library/scala/xml/NodeBuffer.scala
+++ b/src/library/scala/xml/NodeBuffer.scala
@@ -8,11 +8,8 @@
// $Id$
-
package scala.xml
-import collection.{Iterator, Seq, Iterable}
-
/**
* <p>
* This class acts as a Buffer for nodes. If it is used as a sequence
diff --git a/src/library/scala/xml/NodeSeq.scala b/src/library/scala/xml/NodeSeq.scala
index 17ea9228f6..3b56ba25e4 100644
--- a/src/library/scala/xml/NodeSeq.scala
+++ b/src/library/scala/xml/NodeSeq.scala
@@ -11,11 +11,9 @@
package scala.xml
-import collection.immutable
-import collection.immutable.{List, Nil, ::}
-import collection.{Seq, SeqLike}
-import collection.mutable.{Builder, ListBuffer}
-import collection.generic.CanBuildFrom
+import collection.{ mutable, immutable, generic, SeqLike }
+import mutable.{ Builder, ListBuffer }
+import generic.{ CanBuildFrom }
/** This object ...
*
@@ -43,7 +41,7 @@ object NodeSeq {
* @author Burak Emir
* @version 1.0
*/
-abstract class NodeSeq extends immutable.Seq[Node] with SeqLike[Node, NodeSeq] {
+abstract class NodeSeq extends immutable.Seq[Node] with SeqLike[Node, NodeSeq] with Equality {
import NodeSeq.seqToNodeSeq // import view magic for NodeSeq wrappers
/** Creates a list buffer as builder for this class */
@@ -56,12 +54,23 @@ abstract class NodeSeq extends immutable.Seq[Node] with SeqLike[Node, NodeSeq] {
def apply(i: Int): Node = theSeq(i)
def apply(f: Node => Boolean): NodeSeq = filter(f)
- /** structural equality (XXX - this shatters any hope of hashCode equality) */
- override def equals(x: Any): Boolean = x match {
- case z:Node => (length == 1) && z == apply(0)
- case z:Seq[_] => sameElements(z)
- case z:String => text == z
- case _ => false
+ def xml_sameElements[A](that: Iterable[A]): Boolean = {
+ val these = this.iterator
+ val those = that.iterator
+ while (these.hasNext && those.hasNext)
+ if (these.next xml_!= those.next)
+ return false
+
+ !these.hasNext && !those.hasNext
+ }
+ def basisForHashCode: Seq[Any] = theSeq
+ override def canEqual(other: Any) = other match {
+ case _: NodeSeq => true
+ case _ => false
+ }
+ override def strict_==(other: Equality) = other match {
+ case x: NodeSeq => (length == x.length) && (theSeq sameElements x.theSeq)
+ case _ => false
}
/** Projection function. Similar to XPath, use <code>this \ "foo"</code>
@@ -80,8 +89,8 @@ abstract class NodeSeq extends immutable.Seq[Node] with SeqLike[Node, NodeSeq] {
* @return ...
*/
def \(that: String): NodeSeq = {
+ def fail = throw new IllegalArgumentException(that)
def atResult = {
- def fail = throw new IllegalArgumentException(that)
lazy val y = this(0)
val attr =
if (that.length == 1) fail
@@ -92,7 +101,7 @@ abstract class NodeSeq extends immutable.Seq[Node] with SeqLike[Node, NodeSeq] {
if (uri == "" || key == "") fail
else y.attribute(uri, key)
}
- else y.attribute(that.substring(1))
+ else y.attribute(that drop 1)
attr match {
case Some(x) => Group(x)
@@ -104,6 +113,7 @@ abstract class NodeSeq extends immutable.Seq[Node] with SeqLike[Node, NodeSeq] {
NodeSeq fromSeq (this flatMap (_.child) filter cond)
that match {
+ case "" => fail
case "_" => makeSeq(!_.isAtom)
case _ if (that(0) == '@' && this.length == 1) => atResult
case _ => makeSeq(_.label == that)
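
For orientation, the projection semantics adjusted here, in use; the document and selectors are illustrative only, not taken from the patch:

    val doc = <a><b key="1"/><b key="2"/></a>
    doc \ "b"                    // both <b/> elements as a NodeSeq
    (doc \ "b").head \ "@key"    // the key attribute of the first <b/>
    // doc \ ""                  // now fails fast with IllegalArgumentException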
diff --git a/src/library/scala/xml/Null.scala b/src/library/scala/xml/Null.scala
index a3246d4b57..d6f06fc3cd 100644
--- a/src/library/scala/xml/Null.scala
+++ b/src/library/scala/xml/Null.scala
@@ -12,70 +12,49 @@
package scala.xml
import Utility.{ isNameStart }
-import collection.Iterator
-import collection.immutable.{Nil, List}
-import collection.mutable.StringBuilder
+/** Essentially, every method in here is a dummy, returning Zero[T].
+ * It provides a backstop for the unusual collection defined by MetaData,
+ * sort of a linked list of tails.
+ */
case object Null extends MetaData {
-
- /** appends given MetaData items to this MetaData list */
- override def append(m: MetaData, scope: NamespaceBinding = TopScope): MetaData = m
-
- override def containedIn1(m: MetaData): Boolean = false
-
- /** returns its argument */
- def copy(next: MetaData) = next
-
override def iterator = Iterator.empty
-
+ override def append(m: MetaData, scope: NamespaceBinding = TopScope): MetaData = m
override def filter(f: MetaData => Boolean): MetaData = this
+ def copy(next: MetaData) = next
def getNamespace(owner: Node) = null
- final override def hasNext = false
+ override def hasNext = false
def next = null
def key = null
def value = null
-
- final override def length = 0
- final override def length(i: Int) = i
-
def isPrefixed = false
- /** deep equals method - XXX */
- override def equals(that: Any) = that match {
- case m: MetaData => m.length == 0
- case _ => false
- }
+ override def length = 0
+ override def length(i: Int) = i
- def equals1(that:MetaData) = that.length == 0
-
- override def map(f: MetaData => Text): List[Text] = Nil
+ override def strict_==(other: Equality) = other match {
+ case x: MetaData => x.length == 0
+ case _ => false
+ }
+ override def basisForHashCode: Seq[Any] = Nil
- /** null */
+ def apply(namespace: String, scope: NamespaceBinding, key: String) = null
def apply(key: String) = {
- if(!isNameStart(key charAt 0))
+ if (!isNameStart(key.head))
throw new IllegalArgumentException("not a valid attribute name '"+key+"', so can never match !")
+
null
}
- /** gets value of qualified (prefixed) attribute with given key */
- def apply(namespace: String, scope: NamespaceBinding, key: String) = null
-
- override def hashCode(): Int = 0
-
+ def toString1(sb: StringBuilder) = ()
override def toString1(): String = ""
-
- //appends string representations of single attribute to StringBuilder
- def toString1(sb:StringBuilder) = {}
-
override def toString(): String = ""
override def buildString(sb: StringBuilder): StringBuilder = sb
-
override def wellformed(scope: NamespaceBinding) = true
def remove(key: String) = this
-
def remove(namespace: String, scope: NamespaceBinding, key: String) = this
}
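
As the new header comment says, Null terminates the linked list that MetaData forms, so attribute chains are built by prepending onto it. A minimal illustration (attribute names are made up):

    val attrs = new UnprefixedAttribute("a", "1",
                new UnprefixedAttribute("b", "2", Null))
    attrs.length    // 2 -- Null itself contributes length 0 and an empty iterator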
diff --git a/src/library/scala/xml/PCData.scala b/src/library/scala/xml/PCData.scala
index 5cf4bda070..fa44591496 100644
--- a/src/library/scala/xml/PCData.scala
+++ b/src/library/scala/xml/PCData.scala
@@ -7,16 +7,9 @@ package scala.xml
* and is to be preserved as CDATA section in the output.
*/
case class PCData(_data: String) extends Atom[String](_data) {
- /* The following code is a derivative work of scala.xml.Text */
if (null == data)
throw new IllegalArgumentException("tried to construct PCData with null")
- final override def equals(x: Any) = x match {
- case s: String => s.equals(data)
- case s: Atom[_] => data == s.data
- case _ => false
- }
-
/** Returns text, with some characters escaped according to the XML
* specification.
*
diff --git a/src/library/scala/xml/PrefixedAttribute.scala b/src/library/scala/xml/PrefixedAttribute.scala
index a465e61ba3..d7c04ab6ad 100644
--- a/src/library/scala/xml/PrefixedAttribute.scala
+++ b/src/library/scala/xml/PrefixedAttribute.scala
@@ -8,13 +8,8 @@
// $Id$
-
package scala.xml
-import collection.Seq
-import collection.mutable.StringBuilder
-
-
/** prefixed attributes always have a non-null namespace.
*
* @param pre ...
@@ -36,24 +31,12 @@ extends Attribute
def this(pre: String, key: String, value: String, next: MetaData) =
this(pre, key, Text(value), next)
- /*
- // the problem here is the fact that we cannot remove the proper attribute from
- // next, and thus cannot guarantee that hashcodes are computed properly
- def this(pre: String, key: String, value: scala.AllRef, next: MetaData) =
- throw new UnsupportedOperationException("can't construct prefixed nil attributes")
- */
-
/** Returns a copy of this unprefixed attribute with the given
* next field.
*/
def copy(next: MetaData) =
new PrefixedAttribute(pre, key, value, next)
- def equals1(m: MetaData) =
- (m.isPrefixed &&
- (m.asInstanceOf[PrefixedAttribute].pre == pre) &&
- (m.key == key) && (m.value sameElements value))
-
def getNamespace(owner: Node) =
owner.getNamespace(pre)
@@ -68,41 +51,8 @@ extends Attribute
else
next(namespace, scope, key)
}
-
- /** returns true */
- final def isPrefixed = true
-
- /** returns the hashcode.
- */
- override def hashCode() =
- pre.hashCode() * 41 + key.hashCode() * 7 + next.hashCode()
-
-
- /** appends string representation of only this attribute to stringbuffer */
- def toString1(sb:StringBuilder): Unit = if(value ne null) {
- sb.append(pre)
- sb.append(':')
- sb.append(key)
- sb.append('=')
- val sb2 = new StringBuilder()
- Utility.sequenceToXML(value, TopScope, sb2, true)
- Utility.appendQuoted(sb2.toString(), sb)
- }
-
- def wellformed(scope: NamespaceBinding): Boolean =
- (null == next(scope.getURI(pre), scope, key) &&
- next.wellformed(scope))
-
- def remove(key: String) =
- copy(next.remove(key))
-
- def remove(namespace: String, scope: NamespaceBinding, key: String): MetaData =
- if (key == this.key && scope.getURI(pre) == namespace)
- next
- else
- next.remove(namespace, scope, key)
-
}
+
object PrefixedAttribute {
def unapply(x: PrefixedAttribute) = Some(x.pre, x.key, x.value, x.next)
}
diff --git a/src/library/scala/xml/PrettyPrinter.scala b/src/library/scala/xml/PrettyPrinter.scala
index 1fb922eb95..77199ca367 100644
--- a/src/library/scala/xml/PrettyPrinter.scala
+++ b/src/library/scala/xml/PrettyPrinter.scala
@@ -8,10 +8,8 @@
// $Id$
-
package scala.xml
-import scala.collection.Map
import Utility.sbToString
/** Class for pretty printing. After instantiating, you can use the
@@ -23,7 +21,7 @@ import Utility.sbToString
* @version 1.0
*
* @param width the width to fit the output into
- * @step indentation
+ * @param step indentation
*/
class PrettyPrinter(width: Int, step: Int) {
@@ -39,7 +37,6 @@ class PrettyPrinter(width: Int, step: Int) {
protected var items: List[Item] = Nil
protected var cur = 0
- //protected var pmap:Map[String,String] = _
protected def reset() = {
cur = 0
diff --git a/src/library/scala/xml/ProcInstr.scala b/src/library/scala/xml/ProcInstr.scala
index 5a4e67e647..051fd499f4 100644
--- a/src/library/scala/xml/ProcInstr.scala
+++ b/src/library/scala/xml/ProcInstr.scala
@@ -9,7 +9,6 @@
// $Id$
package scala.xml
-import collection.mutable.StringBuilder
/** an XML node for processing instructions (PI)
*
diff --git a/src/library/scala/xml/SpecialNode.scala b/src/library/scala/xml/SpecialNode.scala
index d40d829c4b..1688cd1e15 100644
--- a/src/library/scala/xml/SpecialNode.scala
+++ b/src/library/scala/xml/SpecialNode.scala
@@ -8,12 +8,8 @@
// $Id$
-
package scala.xml
-import collection.immutable.{List, Nil, ::}
-import collection.mutable.StringBuilder
-
/** <p>
* <code>SpecialNode</code> is a special XML node which
* represents either text (PCDATA), a comment, a PI, or an entity ref.
diff --git a/src/library/scala/xml/Text.scala b/src/library/scala/xml/Text.scala
index 3090883bb8..5f0b010c9f 100644
--- a/src/library/scala/xml/Text.scala
+++ b/src/library/scala/xml/Text.scala
@@ -8,11 +8,8 @@
// $Id$
-
package scala.xml
-import collection.mutable.StringBuilder
-
// XXX This attempt to make Text not a case class revealed a bug in the pattern
// matcher (see ticket #2883) so I've put the case back. (It was/is desirable that
// it not be a case class because it is using the antipattern of passing constructor
@@ -42,13 +39,6 @@ case class Text(_data: String) extends Atom[String](_data)
if (_data == null)
throw new IllegalArgumentException("tried to construct Text with null")
- /** XXX More hashCode flailing. */
- final override def equals(x: Any) = x match {
- case s:String => s == data
- case s:Atom[_] => data == s.data
- case _ => false
- }
-
/** Returns text, with some characters escaped according to the XML
* specification.
*
diff --git a/src/library/scala/xml/TextBuffer.scala b/src/library/scala/xml/TextBuffer.scala
index 17c40aad2f..84c6c24146 100644
--- a/src/library/scala/xml/TextBuffer.scala
+++ b/src/library/scala/xml/TextBuffer.scala
@@ -8,12 +8,8 @@
// $Id$
-
package scala.xml
-import collection.Seq
-import collection.mutable.StringBuilder
-import collection.immutable.{List, Nil, ::}
import Utility.isSpace
object TextBuffer {
diff --git a/src/library/scala/xml/TopScope.scala b/src/library/scala/xml/TopScope.scala
index c638b80b2d..8b3c1383c9 100644
--- a/src/library/scala/xml/TopScope.scala
+++ b/src/library/scala/xml/TopScope.scala
@@ -10,8 +10,6 @@
package scala.xml
-import collection.mutable.StringBuilder
-
/** top level namespace scope. only contains the predefined binding
* for the &quot;xml&quot; prefix which is bound to
* &quot;http://www.w3.org/XML/1998/namespace&quot;
diff --git a/src/library/scala/xml/Unparsed.scala b/src/library/scala/xml/Unparsed.scala
index a570b83fb5..d3c63172e8 100644
--- a/src/library/scala/xml/Unparsed.scala
+++ b/src/library/scala/xml/Unparsed.scala
@@ -22,13 +22,6 @@ class Unparsed(data: String) extends Atom[String](data)
if (null == data)
throw new IllegalArgumentException("tried to construct Unparsed with null")
- /** XXX another hashCode fail */
- final override def equals(x: Any) = x match {
- case s:String => s == data
- case s:Atom[_] => data == s.data
- case _ => false
- }
-
/** returns text, with some characters escaped according to XML spec */
override def buildString(sb: StringBuilder) = sb append data
}
diff --git a/src/library/scala/xml/UnprefixedAttribute.scala b/src/library/scala/xml/UnprefixedAttribute.scala
index 283cc3a1d0..a8720f13e1 100644
--- a/src/library/scala/xml/UnprefixedAttribute.scala
+++ b/src/library/scala/xml/UnprefixedAttribute.scala
@@ -8,13 +8,8 @@
// $Id$
-
package scala.xml
-import collection.Seq
-import collection.mutable.StringBuilder
-
-
/** Unprefixed attributes have the null namespace, and no prefix field
*
* @author Burak Emir
@@ -25,6 +20,7 @@ class UnprefixedAttribute(
next1: MetaData)
extends Attribute
{
+ final val pre = null
val next = if (value ne null) next1 else next1.remove(key)
/** same as this(key, Text(value), next) */
@@ -38,9 +34,6 @@ extends Attribute
/** returns a copy of this unprefixed attribute with the given next field*/
def copy(next: MetaData) = new UnprefixedAttribute(key, value, next)
- def equals1(m: MetaData) =
- !m.isPrefixed && (m.key == key) && (m.value sameElements value)
-
final def getNamespace(owner: Node): String = null
/**
@@ -62,33 +55,6 @@ extends Attribute
*/
def apply(namespace: String, scope: NamespaceBinding, key: String): Seq[Node] =
next(namespace, scope, key)
-
- override def hashCode() =
- key.hashCode() * 7 + { if(value ne null) value.hashCode() * 53 else 0 } + next.hashCode()
-
- final def isPrefixed = false
-
- /** appends string representation of only this attribute to stringbuffer.
- *
- * @param sb ..
- */
- def toString1(sb: StringBuilder): Unit = if (value ne null) {
- sb.append(key)
- sb.append('=')
- val sb2 = new StringBuilder()
- Utility.sequenceToXML(value, TopScope, sb2, true)
- Utility.appendQuoted(sb2.toString(), sb)
- }
-
- def wellformed(scope: NamespaceBinding): Boolean =
- (null == next(null, scope, key)) && next.wellformed(scope)
-
- def remove(key: String) =
- if (this.key == key) next else copy(next.remove(key))
-
- def remove(namespace: String, scope: NamespaceBinding, key: String): MetaData =
- next.remove(namespace, scope, key)
-
}
object UnprefixedAttribute {
def unapply(x: UnprefixedAttribute) = Some(x.key, x.value, x.next)
diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala
index 1cfe9c79c9..48a23dc389 100644
--- a/src/library/scala/xml/Utility.scala
+++ b/src/library/scala/xml/Utility.scala
@@ -11,8 +11,8 @@
package scala.xml
-import collection.mutable.{Set, HashSet, StringBuilder}
-import collection.Seq
+import collection.mutable
+import mutable.{ Set, HashSet }
import parsing.XhtmlEntities
/**
@@ -84,7 +84,7 @@ object Utility extends AnyRef with parsing.TokenTests
object Escapes {
/** For reasons unclear escape and unescape are a long ways from
- being logical inverses. */
+ * being logical inverses. */
val pairs = Map(
"lt" -> '<',
"gt" -> '>',
@@ -106,11 +106,29 @@ object Utility extends AnyRef with parsing.TokenTests
* @param s ...
* @return ...
*/
- final def escape(text: String, s: StringBuilder): StringBuilder =
- text.foldLeft(s)((s, c) => escMap.get(c) match {
- case Some(str) => s append str
- case None => s append c
- })
+ final def escape(text: String, s: StringBuilder): StringBuilder = {
+ // Implemented per XML spec:
+ // http://www.w3.org/International/questions/qa-controls
+ // imperative code 3x-4x faster than current implementation
+ // dpp (David Pollak) 2010/02/03
+ val len = text.length
+ var pos = 0
+ while (pos < len) {
+ text.charAt(pos) match {
+ case '<' => s.append("&lt;")
+ case '>' => s.append("&gt;")
+ case '&' => s.append("&amp;")
+ case '"' => s.append("&quot;")
+ case '\n' => s.append('\n')
+ case '\r' => s.append('\r')
+ case '\t' => s.append('\t')
+ case c => if (c >= ' ') s.append(c)
+ }
+
+ pos += 1
+ }
+ s
+ }
/**
* Appends unescaped string to <code>s</code>, amp becomes &amp;
@@ -131,7 +149,7 @@ object Utility extends AnyRef with parsing.TokenTests
* @param nodes ...
* @return ...
*/
- def collectNamespaces(nodes: Seq[Node]): Set[String] =
+ def collectNamespaces(nodes: Seq[Node]): mutable.Set[String] =
nodes.foldLeft(new HashSet[String]) { (set, x) => collectNamespaces(x, set) ; set }
/**
@@ -140,7 +158,7 @@ object Utility extends AnyRef with parsing.TokenTests
* @param n ...
* @param set ...
*/
- def collectNamespaces(n: Node, set: Set[String]) {
+ def collectNamespaces(n: Node, set: mutable.Set[String]) {
if (n.doCollectNamespaces) {
set += n.namespace
for (a <- n.attributes) a match {
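
The imperative escape above handles exactly '<', '>', '&' and '"', passes \n, \r and \t through, and drops any other character below ' '. For example (assuming the default scala.collection.mutable.StringBuilder from Predef):

    val sb = new StringBuilder
    Utility.escape("a < b & \"c\"\u0000", sb)
    sb.toString    // "a &lt; b &amp; &quot;c&quot;" -- the NUL character is dropped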
diff --git a/src/library/scala/xml/XML.scala b/src/library/scala/xml/XML.scala
index 1e85d4ae06..dd85b58e50 100644
--- a/src/library/scala/xml/XML.scala
+++ b/src/library/scala/xml/XML.scala
@@ -11,12 +11,10 @@
package scala.xml
-import scala.xml.parsing.NoBindingFactoryAdapter
-import scala.xml.factory.XMLLoader
-import org.xml.sax.InputSource
-import javax.xml.parsers.{ SAXParser, SAXParserFactory }
-import java.io.{File, FileDescriptor, FileInputStream, FileOutputStream}
-import java.io.{InputStream, Reader, StringReader, Writer}
+import parsing.NoBindingFactoryAdapter
+import factory.XMLLoader
+import java.io.{ File, FileDescriptor, FileInputStream, FileOutputStream }
+import java.io.{ InputStream, Reader, StringReader, Writer }
import java.nio.channels.Channels
import scala.util.control.Exception.ultimately
@@ -56,11 +54,11 @@ object XML extends XMLLoader[Elem]
@deprecated("Use save() instead")
final def saveFull(filename: String, node: Node, xmlDecl: Boolean, doctype: dtd.DocType): Unit =
- saveFull(filename, node, encoding, xmlDecl, doctype)
+ save(filename, node, encoding, xmlDecl, doctype)
@deprecated("Use save() instead")
final def saveFull(filename: String, node: Node, enc: String, xmlDecl: Boolean, doctype: dtd.DocType): Unit =
- saveFull(filename, node, enc, xmlDecl, doctype)
+ save(filename, node, enc, xmlDecl, doctype)
/** Saves a node to a file with given filename using given encoding
* optionally with xmldecl and doctype declaration.
@@ -82,7 +80,7 @@ object XML extends XMLLoader[Elem]
val fos = new FileOutputStream(filename)
val w = Channels.newWriter(fos.getChannel(), enc)
- ultimately({ w.close() ; fos.close() })(
+ ultimately(w.close())(
write(w, node, enc, xmlDecl, doctype)
)
}
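
The save body now assumes that closing a writer obtained from Channels.newWriter also releases the underlying channel and its FileOutputStream, which appears to be the rationale for dropping fos.close(). The same pattern, standalone (withWriter is a hypothetical helper name, not part of the patch):

    import java.io.{ FileOutputStream, Writer }
    import java.nio.channels.Channels
    import scala.util.control.Exception.ultimately

    def withWriter(filename: String, enc: String)(body: Writer => Unit) {
      val fos = new FileOutputStream(filename)
      val w   = Channels.newWriter(fos.getChannel(), enc)
      ultimately(w.close())(body(w))     // close runs in a finally block, even on exception
    }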
diff --git a/src/library/scala/xml/dtd/ContentModel.scala b/src/library/scala/xml/dtd/ContentModel.scala
index d5d4b95cce..772d8ec599 100644
--- a/src/library/scala/xml/dtd/ContentModel.scala
+++ b/src/library/scala/xml/dtd/ContentModel.scala
@@ -13,10 +13,7 @@ package scala.xml
package dtd
import util.regexp.WordExp
-import util.automata.{DetWordAutom, SubsetConstruction, WordBerrySethi}
-import collection.mutable.{HashSet, StringBuilder}
-import collection.immutable.{List, Nil}
-import collection.Seq
+import util.automata._
import Utility.sbToString
import PartialFunction._
diff --git a/src/library/scala/xml/dtd/ContentModelParser.scala b/src/library/scala/xml/dtd/ContentModelParser.scala
index c260a9fc46..2b0df3f6a5 100644
--- a/src/library/scala/xml/dtd/ContentModelParser.scala
+++ b/src/library/scala/xml/dtd/ContentModelParser.scala
@@ -8,12 +8,9 @@
// $Id$
-
package scala.xml
package dtd
-import collection.immutable.List
-
/** Parser for regexps (content models in DTD element declarations) */
object ContentModelParser extends Scanner { // a bit too permissive concerning #PCDATA
diff --git a/src/library/scala/xml/dtd/DTD.scala b/src/library/scala/xml/dtd/DTD.scala
index 14c16f8489..0fde1188f3 100644
--- a/src/library/scala/xml/dtd/DTD.scala
+++ b/src/library/scala/xml/dtd/DTD.scala
@@ -8,31 +8,25 @@
// $Id$
-
package scala.xml
package dtd
-import scala.collection.mutable.{HashMap, Map}
+import collection.mutable
+import mutable.HashMap
/** A document type declaration.
*
* @author Burak Emir
*/
abstract class DTD {
-
- var externalID: ExternalID = null
-
- def notations: Seq[NotationDecl] = Nil
-
+ var externalID: ExternalID = null
+ var decls: List[Decl] = Nil
+ def notations: Seq[NotationDecl] = Nil
def unparsedEntities: Seq[EntityDecl] = Nil
- var elem: Map[String, ElemDecl] = new HashMap[String, ElemDecl]()
-
- var attr: Map[String, AttListDecl] = new HashMap[String, AttListDecl]()
-
- var ent: Map[String, EntityDecl] = new HashMap[String, EntityDecl]()
-
- var decls: List[Decl] = Nil
+ var elem: mutable.Map[String, ElemDecl] = new HashMap[String, ElemDecl]()
+ var attr: mutable.Map[String, AttListDecl] = new HashMap[String, AttListDecl]()
+ var ent: mutable.Map[String, EntityDecl] = new HashMap[String, EntityDecl]()
override def toString() =
"DTD [\n%s%s]".format(
diff --git a/src/library/scala/xml/dtd/Decl.scala b/src/library/scala/xml/dtd/Decl.scala
index 25ee30b356..2ac3d42a67 100644
--- a/src/library/scala/xml/dtd/Decl.scala
+++ b/src/library/scala/xml/dtd/Decl.scala
@@ -8,14 +8,10 @@
// $Id$
-
package scala.xml
package dtd
import Utility.sbToString
-import collection.immutable.List
-import collection.mutable.StringBuilder
-
abstract class Decl
@@ -114,7 +110,7 @@ case class IntDef(value:String) extends EntityDef {
val n = tmp.substring(ix, iz);
if( !Utility.isName( n ))
- throw new IllegalArgumentException("interal entity def: \""+n+"\" must be an XML Name");
+ throw new IllegalArgumentException("internal entity def: \""+n+"\" must be an XML Name");
tmp = tmp.substring(iz+1, tmp.length());
ix = tmp.indexOf('%');
diff --git a/src/library/scala/xml/dtd/DocType.scala b/src/library/scala/xml/dtd/DocType.scala
index dab1d9ff6b..7da38b3e73 100644
--- a/src/library/scala/xml/dtd/DocType.scala
+++ b/src/library/scala/xml/dtd/DocType.scala
@@ -8,12 +8,9 @@
// $Id$
-
package scala.xml
package dtd
-import collection.Seq
-
/** An XML node for document type declaration.
*
* @author Burak Emir
diff --git a/src/library/scala/xml/dtd/ElementValidator.scala b/src/library/scala/xml/dtd/ElementValidator.scala
index cc37e2b527..9ebed8d87c 100644
--- a/src/library/scala/xml/dtd/ElementValidator.scala
+++ b/src/library/scala/xml/dtd/ElementValidator.scala
@@ -95,7 +95,7 @@ class ElementValidator() extends Function1[Node,Boolean] {
}
/** check children, return true if conform to content model
- * @pre contentModel != null
+ * @note contentModel != null
*/
def check(nodes: Seq[Node]): Boolean = contentModel match {
case ANY => true
@@ -120,7 +120,7 @@ class ElementValidator() extends Function1[Node,Boolean] {
}
/** applies various validations - accumulates error messages in exc
- * @todo: fail on first error, ignore other errors (rearranging conditions)
+ * @todo fail on first error, ignore other errors (rearranging conditions)
*/
def apply(n: Node): Boolean =
//- ? check children
diff --git a/src/library/scala/xml/dtd/ExternalID.scala b/src/library/scala/xml/dtd/ExternalID.scala
index 784273083a..b0d311e54a 100644
--- a/src/library/scala/xml/dtd/ExternalID.scala
+++ b/src/library/scala/xml/dtd/ExternalID.scala
@@ -8,14 +8,9 @@
// $Id$
-
package scala.xml
package dtd
-import collection.immutable.{List, Nil}
-import collection.mutable.StringBuilder
-
-
/** an ExternalIDs - either PublicID or SystemID
*
* @author Burak Emir
diff --git a/src/library/scala/xml/dtd/Scanner.scala b/src/library/scala/xml/dtd/Scanner.scala
index 1a45a186a0..7b3e2acfe0 100644
--- a/src/library/scala/xml/dtd/Scanner.scala
+++ b/src/library/scala/xml/dtd/Scanner.scala
@@ -8,13 +8,9 @@
// $Id$
-
package scala.xml
package dtd
-import collection.{Seq, Iterator}
-import collection.immutable.{List, Nil}
-
/** Scanner for regexps (content models in DTD element declarations)
* todo: cleanup
*/
diff --git a/src/library/scala/xml/factory/Binder.scala b/src/library/scala/xml/factory/Binder.scala
index caad009b9b..3996ef2d36 100644
--- a/src/library/scala/xml/factory/Binder.scala
+++ b/src/library/scala/xml/factory/Binder.scala
@@ -12,7 +12,7 @@
package scala.xml
package factory
-import scala.xml.parsing.ValidatingMarkupHandler
+import parsing.ValidatingMarkupHandler
/**
* @author Burak Emir
diff --git a/src/library/scala/xml/factory/NodeFactory.scala b/src/library/scala/xml/factory/NodeFactory.scala
index 45dac6ccda..2dd52242db 100644
--- a/src/library/scala/xml/factory/NodeFactory.scala
+++ b/src/library/scala/xml/factory/NodeFactory.scala
@@ -12,11 +12,7 @@ package scala.xml
package factory
import parsing.{ FactoryAdapter, NoBindingFactoryAdapter }
-import collection.Seq
-import collection.immutable.{List, Nil}
-import org.xml.sax.InputSource
import java.io.{ InputStream, Reader, StringReader, File, FileDescriptor, FileInputStream }
-import javax.xml.parsers.{ SAXParser, SAXParserFactory }
trait NodeFactory[A <: Node]
{
diff --git a/src/library/scala/xml/factory/XMLLoader.scala b/src/library/scala/xml/factory/XMLLoader.scala
index a1bca21b40..8bb0cf4188 100644
--- a/src/library/scala/xml/factory/XMLLoader.scala
+++ b/src/library/scala/xml/factory/XMLLoader.scala
@@ -11,11 +11,9 @@
package scala.xml
package factory
+import javax.xml.parsers.SAXParserFactory
import parsing.{ FactoryAdapter, NoBindingFactoryAdapter }
-import org.xml.sax.InputSource
import java.io.{ InputStream, Reader, StringReader, File, FileDescriptor, FileInputStream }
-import javax.xml.parsers.{ SAXParser, SAXParserFactory }
-import java.net.URL
/** Presents collection of XML loading methods which use the parser
* created by "def parser".
diff --git a/src/library/scala/xml/include/XIncludeException.scala b/src/library/scala/xml/include/XIncludeException.scala
index 26c66f9b1d..a671f32dca 100644
--- a/src/library/scala/xml/include/XIncludeException.scala
+++ b/src/library/scala/xml/include/XIncludeException.scala
@@ -43,7 +43,7 @@ class XIncludeException(message: String) extends Exception(message) {
* This method allows you to store the original exception.
*
* @param nestedException the underlying exception which
- caused the XIncludeException to be thrown
+ * caused the XIncludeException to be thrown
*/
def setRootCause(nestedException: Throwable ) {
this.rootCause = nestedException
diff --git a/src/library/scala/xml/include/sax/Main.scala b/src/library/scala/xml/include/sax/Main.scala
index e7e986e0f8..60031b4b6a 100644
--- a/src/library/scala/xml/include/sax/Main.scala
+++ b/src/library/scala/xml/include/sax/Main.scala
@@ -13,11 +13,10 @@ package include.sax
import scala.xml.include._
import scala.util.control.Exception.{ catching, ignoring }
-import org.xml.sax.{ SAXException, SAXParseException, EntityResolver, XMLReader }
+import org.xml.sax.XMLReader
import org.xml.sax.helpers.XMLReaderFactory
object Main {
- private val xercesClass = "org.apache.xerces.parsers.SAXParser"
private val namespacePrefixes = "http://xml.org/sax/features/namespace-prefixes"
private val lexicalHandler = "http://xml.org/sax/properties/lexical-handler"
@@ -27,7 +26,7 @@ object Main {
* </p>
*
* @param args contains the URLs and/or filenames
- * of the documents to be procesed.
+ * of the documents to be processed.
*/
def main(args: Array[String]) {
def saxe[T](body: => T) = catching[T](classOf[SAXException]) opt body
@@ -35,7 +34,7 @@ object Main {
val parser: XMLReader =
saxe[XMLReader](XMLReaderFactory.createXMLReader()) getOrElse (
- saxe[XMLReader](XMLReaderFactory.createXMLReader(xercesClass)) getOrElse (
+ saxe[XMLReader](XMLReaderFactory.createXMLReader(XercesClassName)) getOrElse (
return error("Could not find an XML parser")
)
)
diff --git a/src/library/scala/xml/include/sax/XIncludeFilter.scala b/src/library/scala/xml/include/sax/XIncludeFilter.scala
index b469086e73..6e64fa9aa5 100644
--- a/src/library/scala/xml/include/sax/XIncludeFilter.scala
+++ b/src/library/scala/xml/include/sax/XIncludeFilter.scala
@@ -12,11 +12,10 @@ package scala.xml
package include.sax
import scala.xml.include._
-import org.xml.sax.{ Attributes, SAXException, XMLReader, EntityResolver, Locator }
+import org.xml.sax.{ Attributes, XMLReader, Locator }
import org.xml.sax.helpers.{ XMLReaderFactory, XMLFilterImpl, NamespaceSupport, AttributesImpl }
-import java.net.{ URL, URLConnection, MalformedURLException }
-import java.io.{ UnsupportedEncodingException, IOException, InputStream, BufferedInputStream, InputStreamReader }
+import java.io.{ InputStream, BufferedInputStream, InputStreamReader }
import java.util.Stack
/**
@@ -351,61 +350,49 @@ class XIncludeFilter extends XMLFilterImpl {
be downloaded from the specified URL.
*/
private def includeXMLDocument(url: String) {
- var source: URL = null
- try {
- val base = bases.peek().asInstanceOf[URL]
- source = new URL(base, url)
- }
- catch {
- case e:MalformedURLException =>
- val ex = new UnavailableResourceException("Unresolvable URL " + url
- + getLocation());
- ex.setRootCause(e)
- throw new SAXException("Unresolvable URL " + url + getLocation(), ex)
- }
+ val source =
+ try new URL(bases.peek(), url)
+ catch {
+ case e: MalformedURLException =>
+ val ex = new UnavailableResourceException("Unresolvable URL " + url + getLocation())
+ ex setRootCause e
+ throw new SAXException("Unresolvable URL " + url + getLocation(), ex)
+ }
try {
- // make this more robust
- var parser: XMLReader = null
- try {
- parser = XMLReaderFactory.createXMLReader()
- } catch {
- case e:SAXException =>
- try {
- parser = XMLReaderFactory.createXMLReader(
- "org.apache.xerces.parsers.SAXParser"
- );
- } catch {
- case e2: SAXException =>
- System.err.println("Could not find an XML parser")
- }
- }
- if(parser != null) {
- parser.setContentHandler(this)
- val resolver = this.getEntityResolver()
- if (resolver != null) parser.setEntityResolver(resolver);
- // save old level and base
- val previousLevel = level
- this.level = 0
- if (bases.contains(source)) {
- val e = new CircularIncludeException(
- "Circular XInclude Reference to " + source + getLocation()
- );
- throw new SAXException("Circular XInclude Reference", e)
+ val parser: XMLReader =
+ try XMLReaderFactory.createXMLReader()
+ catch {
+ case e: SAXException =>
+ try XMLReaderFactory.createXMLReader(XercesClassName)
+ catch { case _: SAXException => return System.err.println("Could not find an XML parser") }
}
- bases.push(source)
- atRoot = true
- parser.parse(source.toExternalForm())
- // restore old level and base
- this.level = previousLevel
- bases.pop()
- }
+
+ parser setContentHandler this
+ val resolver = this.getEntityResolver()
+ if (resolver != null)
+ parser setEntityResolver resolver
+
+ // save old level and base
+ val previousLevel = level
+ this.level = 0
+ if (bases contains source)
+ throw new SAXException(
+ "Circular XInclude Reference",
+ new CircularIncludeException("Circular XInclude Reference to " + source + getLocation())
+ )
+
+ bases push source
+ atRoot = true
+ parser parse source.toExternalForm()
+
+ // restore old level and base
+ this.level = previousLevel
+ bases.pop()
}
catch {
- case e:IOException =>
- throw new SAXException("Document not found: "
- + source.toExternalForm() + getLocation(), e)
+ case e: IOException =>
+ throw new SAXException("Document not found: " + source.toExternalForm() + getLocation(), e)
}
-
}
}
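
The reworked logic above reduces to: try the platform default XMLReader, fall back to Xerces by class name, and give up if neither is available. A self-contained restatement of that decision (returning an Option instead of printing is this sketch's choice, not the patch's):

    import org.xml.sax.helpers.XMLReaderFactory
    import scala.xml.{ SAXException, XercesClassName }

    object ReaderFallback {
      def newReader(): Option[org.xml.sax.XMLReader] =
        try Some(XMLReaderFactory.createXMLReader())
        catch { case _: SAXException =>
          try Some(XMLReaderFactory.createXMLReader(XercesClassName))
          catch { case _: SAXException => None }
        }
    }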
diff --git a/src/library/scala/xml/include/sax/XIncluder.scala b/src/library/scala/xml/include/sax/XIncluder.scala
index 3417dd78f0..bd9da10c59 100644
--- a/src/library/scala/xml/include/sax/XIncluder.scala
+++ b/src/library/scala/xml/include/sax/XIncluder.scala
@@ -10,22 +10,13 @@
package scala.xml
package include.sax
+
import scala.xml.include._
+import collection.mutable.Stack
-import org.xml.sax.SAXException
-import org.xml.sax.SAXParseException
-import org.xml.sax.ContentHandler
-import org.xml.sax.EntityResolver
-import org.xml.sax.helpers.XMLReaderFactory
-import org.xml.sax.XMLReader
-import org.xml.sax.Locator
-import org.xml.sax.Attributes
+import org.xml.sax.{ ContentHandler, XMLReader, Locator, Attributes }
import org.xml.sax.ext.LexicalHandler
-
-import java.io.{File, IOException, OutputStream, OutputStreamWriter,
- UnsupportedEncodingException, Writer}
-import java.net.{MalformedURLException, URL}
-import java.util.Stack
+import java.io.{ File, OutputStream, OutputStreamWriter, Writer }
/** XIncluder is a SAX <code>ContentHandler</code>
* that writes its XML document onto an output stream after resolving
@@ -35,8 +26,7 @@ import java.util.Stack
* based on Eliotte Rusty Harold's SAXXIncluder
* </p>
*/
-class XIncluder(outs:OutputStream, encoding:String) extends Object
-with ContentHandler with LexicalHandler {
+class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler with LexicalHandler {
var out = new OutputStreamWriter(outs, encoding)
@@ -153,7 +143,7 @@ with ContentHandler with LexicalHandler {
def startDTD(name: String, publicID: String, systemID: String) {
inDTD = true
// if this is the source document, output a DOCTYPE declaration
- if (entities.size() == 0) {
+ if (entities.isEmpty) {
var id = ""
if (publicID != null) id = " PUBLIC \"" + publicID + "\" \"" + systemID + '"';
else if (systemID != null) id = " SYSTEM \"" + systemID + '"';
@@ -169,7 +159,7 @@ with ContentHandler with LexicalHandler {
def endDTD() {}
def startEntity(name: String) {
- entities.push(name)
+ entities push name
}
def endEntity(name: String) {
diff --git a/src/library/scala/xml/package.scala b/src/library/scala/xml/package.scala
new file mode 100644
index 0000000000..33639ed978
--- /dev/null
+++ b/src/library/scala/xml/package.scala
@@ -0,0 +1,18 @@
+package scala
+
+package object xml {
+ val XercesClassName = "org.apache.xerces.parsers.SAXParser"
+
+ type SAXException = org.xml.sax.SAXException
+ type SAXParseException = org.xml.sax.SAXParseException
+ type EntityResolver = org.xml.sax.EntityResolver
+ type InputSource = org.xml.sax.InputSource
+
+ type SAXParser = javax.xml.parsers.SAXParser
+
+ type IOException = java.io.IOException
+ type UnsupportedEncodingException = java.io.UnsupportedEncodingException
+
+ type URL = java.net.URL
+ type MalformedURLException = java.net.MalformedURLException
+} \ No newline at end of file
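
Effect of the new package object: code in (or importing) scala.xml sees these names without per-file imports, which is what allows the import pruning in the surrounding files. Illustrative client code:

    import scala.xml._

    object AliasDemo {
      def describe(e: SAXParseException): String =    // resolved via the package object
        "line " + e.getLineNumber + ": " + e.getMessage
      val fallbackParser: String = XercesClassName    // "org.apache.xerces.parsers.SAXParser"
    }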
diff --git a/src/library/scala/xml/parsing/ConstructingParser.scala b/src/library/scala/xml/parsing/ConstructingParser.scala
index f029fc745a..00f195e9fd 100644
--- a/src/library/scala/xml/parsing/ConstructingParser.scala
+++ b/src/library/scala/xml/parsing/ConstructingParser.scala
@@ -25,28 +25,27 @@ object ConstructingParser {
}
/** An xml parser. parses XML and invokes callback methods of a MarkupHandler.
- * Don't forget to call next.ch on a freshly instantiated parser in order to
- * initialize it. If you get the parser from the object method, initialization
- * is already done for you.
- *
- *<pre>
-object parseFromURL {
- def main(args:Array[String]): Unit = {
- val url = args(0);
- val src = scala.io.Source.fromURL(url);
- val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, false); // fromSource initializes automatically
- val doc = cpa.document();
-
- // let's see what it is
- val ppr = new scala.xml.PrettyPrinter(80,5);
- val ele = doc.docElem;
- Console.println("finished parsing");
- val out = ppr.format(ele);
- Console.println(out);
- }
-}
-</pre>
- */
+ * Don't forget to call next.ch on a freshly instantiated parser in order to
+ * initialize it. If you get the parser from the object method, initialization
+ * is already done for you.
+ *
+ * {{{
+ * object parseFromURL {
+ * def main(args:Array[String]): Unit = {
+ * val url = args(0);
+ * val src = scala.io.Source.fromURL(url);
+ * val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, false); // fromSource initializes automatically
+ * val doc = cpa.document();
+ *
+ * // let's see what it is
+ * val ppr = new scala.xml.PrettyPrinter(80,5);
+ * val ele = doc.docElem;
+ * Console.println("finished parsing");
+ * val out = ppr.format(ele);
+ * Console.println(out);
+ * }
+ * }
+ * }}} */
class ConstructingParser(val input: Source, val preserveWS: Boolean)
extends ConstructingHandler
with ExternalSources
diff --git a/src/library/scala/xml/parsing/DefaultMarkupHandler.scala b/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
index 69c59c30cf..0a8bd7c4d6 100644
--- a/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
+++ b/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
@@ -13,7 +13,7 @@ package scala.xml
package parsing
-/** default implemenation of markup handler always returns NodeSeq.Empty */
+/** default implementation of markup handler always returns NodeSeq.Empty */
abstract class DefaultMarkupHandler extends MarkupHandler {
def elem(pos: Int, pre: String, label: String, attrs: MetaData,
diff --git a/src/library/scala/xml/parsing/FactoryAdapter.scala b/src/library/scala/xml/parsing/FactoryAdapter.scala
index a83f9677a1..6960e05d25 100644
--- a/src/library/scala/xml/parsing/FactoryAdapter.scala
+++ b/src/library/scala/xml/parsing/FactoryAdapter.scala
@@ -12,20 +12,15 @@
package scala.xml
package parsing
-import java.io.{InputStream, Reader, File, FileDescriptor, FileInputStream}
-import collection.mutable.{Stack, StringBuilder}
-import collection.immutable.{List, Nil}
-import collection.{Seq, Iterator}
+import java.io.{ InputStream, Reader, File, FileDescriptor, FileInputStream }
+import collection.mutable.Stack
-import org.xml.sax.{ Attributes, InputSource }
+import org.xml.sax.Attributes
import org.xml.sax.helpers.DefaultHandler
-import javax.xml.parsers.{ SAXParser, SAXParserFactory }
// can be mixed into FactoryAdapter if desired
trait ConsoleErrorHandler extends DefaultHandler
{
- import org.xml.sax.SAXParseException
-
// ignore warning, crimson warns even for entity resolution!
override def warning(ex: SAXParseException): Unit = { }
override def error(ex: SAXParseException): Unit = printError("Error", ex)
diff --git a/src/library/scala/xml/parsing/FatalError.scala b/src/library/scala/xml/parsing/FatalError.scala
index 01b68f6591..73634298fa 100644
--- a/src/library/scala/xml/parsing/FatalError.scala
+++ b/src/library/scala/xml/parsing/FatalError.scala
@@ -10,7 +10,8 @@
package scala.xml
-package parsing;
+package parsing
-
-case class FatalError(msg:String) extends java.lang.RuntimeException(msg);
+/** !!! This is poorly named, but I guess it's in the API.
+ */
+case class FatalError(msg: String) extends java.lang.RuntimeException(msg)
diff --git a/src/library/scala/xml/parsing/MarkupHandler.scala b/src/library/scala/xml/parsing/MarkupHandler.scala
index a0058e8bc4..bcb0e03a07 100644
--- a/src/library/scala/xml/parsing/MarkupHandler.scala
+++ b/src/library/scala/xml/parsing/MarkupHandler.scala
@@ -12,7 +12,8 @@
package scala.xml
package parsing
-import scala.collection.mutable.{HashMap, Map}
+import collection.mutable
+import mutable.HashMap
import scala.io.Source
import scala.util.logging.Logged
import scala.xml.dtd._
@@ -32,7 +33,7 @@ abstract class MarkupHandler extends Logged
val isValidating: Boolean = false
var decls: List[Decl] = Nil
- var ent: Map[String, EntityDecl] = new HashMap[String, EntityDecl]()
+ var ent: mutable.Map[String, EntityDecl] = new HashMap[String, EntityDecl]()
def lookupElemDecl(Label: String): ElemDecl = {
for (z @ ElemDecl(Label, _) <- decls)
@@ -69,7 +70,7 @@ abstract class MarkupHandler extends Logged
*/
def elemEnd(pos: Int, pre: String, label: String): Unit = ()
- /** callback method invoked by MarkupParser after parsing an elementm,
+ /** callback method invoked by MarkupParser after parsing an element,
* between the elemStart and elemEnd callbacks
*
* @param pos the position in the source file
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
index a15cd0f7e4..24e0d78c6f 100644
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -32,7 +32,13 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
self: MarkupParser with MarkupHandler =>
type PositionType = Int
- type InputType = Source
+ type InputType = Source
+ type ElementType = NodeSeq
+ type AttributesType = (MetaData, NamespaceBinding)
+ type NamespaceType = NamespaceBinding
+
+ def truncatedError(msg: String): Nothing = throw FatalError(msg)
+ def errorNoEnd(tag: String) = throw FatalError("expected closing tag of " + tag)
def xHandleError(that: Char, msg: String) = reportSyntaxError(msg)
@@ -102,30 +108,28 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
md
}
- /** &lt;? prolog ::= xml S?
- * // this is a bit more lenient than necessary...
+ /** Factored out common code.
*/
- def prolog(): Tuple3[Option[String], Option[String], Option[Boolean]] = {
-
- //Console.println("(DEBUG) prolog")
- var n = 0
+ private def prologOrTextDecl(isProlog: Boolean): (Option[String], Option[String], Option[Boolean]) = {
var info_ver: Option[String] = None
var info_enc: Option[String] = None
var info_stdl: Option[Boolean] = None
var m = xmlProcInstr()
+ var n = 0
- xSpaceOpt
+ if (isProlog)
+ xSpaceOpt
m("version") match {
- case null => ;
+ case null => ;
case Text("1.0") => info_ver = Some("1.0"); n += 1
case _ => reportSyntaxError("cannot deal with versions != 1.0")
}
m("encoding") match {
case null => ;
- case Text(enc) =>
+ case Text(enc) =>
if (!isValidIANAEncoding(enc))
reportSyntaxError("\"" + enc + "\" is not a valid encoding")
else {
@@ -133,52 +137,33 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
n += 1
}
}
- m("standalone") match {
- case null => ;
- case Text("yes") => info_stdl = Some(true); n += 1
- case Text("no") => info_stdl = Some(false); n += 1
- case _ => reportSyntaxError("either 'yes' or 'no' expected")
+
+ if (isProlog) {
+ m("standalone") match {
+ case null => ;
+ case Text("yes") => info_stdl = Some(true); n += 1
+ case Text("no") => info_stdl = Some(false); n += 1
+ case _ => reportSyntaxError("either 'yes' or 'no' expected")
+ }
}
if (m.length - n != 0) {
- reportSyntaxError("VersionInfo EncodingDecl? SDDecl? or '?>' expected!");
+ val s = if (isProlog) "SDDecl? " else ""
+ reportSyntaxError("VersionInfo EncodingDecl? %sor '?>' expected!" format s)
}
- //Console.println("[MarkupParser::prolog] finished parsing prolog!");
- Tuple3(info_ver,info_enc,info_stdl)
- }
- /** prolog, but without standalone */
- def textDecl(): Tuple2[Option[String],Option[String]] = {
-
- var info_ver: Option[String] = None
- var info_enc: Option[String] = None
-
- var m = xmlProcInstr()
- var n = 0
-
- m("version") match {
- case null => ;
- case Text("1.0") => info_ver = Some("1.0"); n += 1
- case _ => reportSyntaxError("cannot deal with versions != 1.0")
- }
+ (info_ver, info_enc, info_stdl)
+ }
- m("encoding") match {
- case null => ;
- case Text(enc) =>
- if (!isValidIANAEncoding(enc))
- reportSyntaxError("\"" + enc + "\" is not a valid encoding")
- else {
- info_enc = Some(enc)
- n += 1
- }
- }
+ /** &lt;? prolog ::= xml S?
+ * // this is a bit more lenient than necessary...
+ */
+ def prolog(): (Option[String], Option[String], Option[Boolean]) =
+ prologOrTextDecl(true)
- if (m.length - n != 0) {
- reportSyntaxError("VersionInfo EncodingDecl? or '?>' expected!");
- }
- //Console.println("[MarkupParser::textDecl] finished parsing textdecl");
- Tuple2(info_ver, info_enc);
- }
+ /** prolog, but without standalone */
+ def textDecl(): (Option[String], Option[String]) =
+ prologOrTextDecl(false) match { case (x1, x2, _) => (x1, x2) }
/**
*[22] prolog ::= XMLDecl? Misc* (doctypedecl Misc*)?
@@ -190,8 +175,6 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
*/
def document(): Document = {
-
- //Console.println("(DEBUG) document")
doc = new Document()
this.dtd = null
@@ -204,7 +187,6 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
nextch // is prolog ?
var children: NodeSeq = null
if ('?' == ch) {
- //Console.println("[MarkupParser::document] starts with xml declaration");
nextch;
info_prolog = prolog()
doc.version = info_prolog._1
@@ -212,10 +194,8 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
doc.standAlone = info_prolog._3
children = content(TopScope) // DTD handled as side effect
- } else {
- //Console.println("[MarkupParser::document] does not start with xml declaration");
- //
-
+ }
+ else {
val ts = new NodeBuffer();
content1(TopScope, ts); // DTD handled as side effect
ts &+ content(TopScope);
@@ -228,7 +208,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
case _:ProcInstr => ;
case _:Comment => ;
case _:EntityRef => // todo: fix entities, shouldn't be "special"
- reportSyntaxError("no entity references alllowed here");
+ reportSyntaxError("no entity references allowed here");
case s:SpecialNode =>
if (s.toString().trim().length > 0) //non-empty text nodes not allowed
elemCount = elemCount + 2;
@@ -257,6 +237,14 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
this
}
+ def ch_returning_nextch = { val res = ch ; nextch ; res }
+ def mkProcInstr(position: Int, name: String, text: String): NodeSeq =
+ handle.procInstr(position, name, text)
+
+ def mkAttributes(name: String, pscope: NamespaceBinding) =
+ if (isNameStart (ch)) xAttributes(pscope)
+ else (Null, pscope)
+
/** this method assign the next character to ch and advances in input */
def nextch = {
if (curInput.hasNext) {
@@ -315,27 +303,6 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
(aMap,scope)
}
- /** attribute value, terminated by either ' or ". value may not contain &lt;.
- * AttValue ::= `'` { _ } `'`
- * | `"` { _ } `"`
- */
- def xAttributeValue(): String = {
- val endch = ch
- nextch
- while (ch != endch) {
- if ('<' == ch)
- reportSyntaxError( "'<' not allowed in attrib value" );
- putChar(ch)
- nextch
- }
- nextch
- val str = cbuf.toString()
- cbuf.length = 0
-
- // well-formedness constraint
- normalizeAttributeValue(str)
- }
-
/** entity value, terminated by either ' or ". value may not contain &lt;.
* AttValue ::= `'` { _ } `'`
* | `"` { _ } `"`
@@ -353,35 +320,6 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
str
}
-
- /** parse a start or empty tag.
- * [40] STag ::= '&lt;' Name { S Attribute } [S]
- * [44] EmptyElemTag ::= '&lt;' Name { S Attribute } [S]
- */
- protected def xTag(pscope:NamespaceBinding): (String, MetaData, NamespaceBinding) = {
- val qname = xName
-
- xSpaceOpt
- val (aMap: MetaData, scope: NamespaceBinding) = {
- if (isNameStart(ch))
- xAttributes(pscope)
- else
- (Null, pscope)
- }
- (qname, aMap, scope)
- }
-
- /** [42] '&lt;' xmlEndTag ::= '&lt;' '/' Name S? '&gt;'
- */
- def xEndTag(n: String) = {
- xToken('/')
- val m = xName
- if (n != m)
- reportSyntaxError("expected closing tag of " + n/* +", not "+m*/);
- xSpaceOpt
- xToken('>')
- }
-
/** '&lt;! CharData ::= [CDATA[ ( {char} - {char}"]]&gt;"{char} ) ']]&gt;'
*
* see [15]
@@ -392,14 +330,6 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
xTakeUntil(mkResult, () => pos, "]]>")
}
- /** CharRef ::= "&amp;#" '0'..'9' {'0'..'9'} ";"
- * | "&amp;#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
- *
- * see [66]
- */
- def xCharRef(ch: () => Char, nextch: () => Unit): String =
- Utility.parseCharRef(ch, nextch, reportSyntaxError _)
-
/** Comment ::= '&lt;!--' ((Char - '-') | ('-' (Char - '-')))* '--&gt;'
*
* see [15]
@@ -576,7 +506,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
*/
def element1(pscope: NamespaceBinding): NodeSeq = {
val pos = this.pos
- val (qname, aMap, scope) = xTag(pscope)
+ val (qname, (aMap, scope)) = xTag(pscope)
val (pre, local) = Utility.prefix(qname) match {
case Some(p) => (p, qname drop p.length+1)
case _ => (null, qname)
@@ -600,50 +530,6 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
res
}
- //def xEmbeddedExpr: MarkupType;
-
- /** Name ::= (Letter | '_' | ':') (NameChar)*
- *
- * see [5] of XML 1.0 specification
- */
- def xName: String = {
- if (isNameStart(ch)) {
- while (isNameChar(ch)) {
- putChar(ch)
- nextch
- }
- val n = cbuf.toString().intern()
- cbuf.length = 0
- n
- } else {
- reportSyntaxError("name expected")
- ""
- }
- }
-
- /** '&lt;?' ProcInstr ::= Name [S ({Char} - ({Char}'&gt;?' {Char})]'?&gt;'
- *
- * see [15]
- */
- def xProcInstr: NodeSeq = {
- val sb:StringBuilder = new StringBuilder()
- val n = xName
- if (isSpace(ch)) {
- xSpace
- while (true) {
- if (ch == '?' && { sb.append( ch ); nextch; ch == '>' }) {
- sb.length = sb.length - 1;
- nextch;
- return handle.procInstr(tmppos, n, sb.toString);
- } else
- sb.append(ch);
- nextch
- }
- };
- xToken("?>")
- handle.procInstr(tmppos, n, sb.toString)
- }
-
/** parse character data.
* precondition: xEmbeddedBlock == false (we are not in a scala block)
*/
@@ -815,8 +701,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
nextch
}
- /** "rec-xml/#ExtSubset" pe references may not occur within markup
- declarations
+ /** "rec-xml/#ExtSubset" pe references may not occur within markup declarations
*/
def intSubset() {
//Console.println("(DEBUG) intSubset()")
@@ -996,50 +881,4 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
pos = curInput.pos
eof = false // must be false, because of places where entity refs occur
}
-
- /** for the moment, replace only character references
- * see spec 3.3.3
- * precond: cbuf empty
- */
- def normalizeAttributeValue(attval: String): String = {
- val s: Seq[Char] = attval
- val it = s.iterator
- while (it.hasNext) {
- it.next match {
- case ' '|'\t'|'\n'|'\r' =>
- cbuf.append(' ');
- case '&' => it.next match {
- case '#' =>
- var c = it.next
- val s = xCharRef ({ () => c }, { () => c = it.next })
- cbuf.append(s)
- case nchar =>
- val nbuf = new StringBuilder()
- var d = nchar
- do {
- nbuf.append(d)
- d = it.next
- } while(d != ';');
- nbuf.toString() match {
- case "lt" => cbuf.append('<')
- case "gt" => cbuf.append('>')
- case "amp" => cbuf.append('&')
- case "apos" => cbuf.append('\'')
- case "quot" => cbuf.append('"')
- case "quote" => cbuf.append('"')
- case name =>
- cbuf.append('&')
- cbuf.append(name)
- cbuf.append(';')
- }
- }
- case c =>
- cbuf.append(c)
- }
- }
- val name = cbuf.toString()
- cbuf.length = 0
- name
- }
-
}
diff --git a/src/library/scala/xml/parsing/MarkupParserCommon.scala b/src/library/scala/xml/parsing/MarkupParserCommon.scala
index 57c46c4685..936515852b 100644
--- a/src/library/scala/xml/parsing/MarkupParserCommon.scala
+++ b/src/library/scala/xml/parsing/MarkupParserCommon.scala
@@ -11,30 +11,191 @@ package parsing
import scala.io.Source
import scala.xml.dtd._
+import scala.annotation.switch
import Utility.Escapes.{ pairs => unescape }
+object MarkupParserCommon {
+ final val SU = '\u001A'
+}
+import MarkupParserCommon._
+
/** This is not a public trait - it contains common code shared
* between the library level XML parser and the compiler's.
* All members should be accessed through those.
*/
private[scala] trait MarkupParserCommon extends TokenTests {
- private final val SU: Char = 0x1A
protected def unreachable = Predef.error("Cannot be reached.")
- // type HandleType // MarkupHandler, SymbolicXMLBuilder
-
+ // type HandleType // MarkupHandler, SymbolicXMLBuilder
type InputType // Source, CharArrayReader
type PositionType // Int, Position
+ type ElementType // NodeSeq, Tree
+ type NamespaceType // NamespaceBinding, Any
+ type AttributesType // (MetaData, NamespaceBinding), mutable.Map[String, Tree]
+
+ def mkAttributes(name: String, pscope: NamespaceType): AttributesType
+ def mkProcInstr(position: PositionType, name: String, text: String): ElementType
+
+ /** parse a start or empty tag.
+ * [40] STag ::= '<' Name { S Attribute } [S]
+ * [44] EmptyElemTag ::= '<' Name { S Attribute } [S]
+ */
+ protected def xTag(pscope: NamespaceType): (String, AttributesType) = {
+ val name = xName
+ xSpaceOpt
+
+ (name, mkAttributes(name, pscope))
+ }
+
+ /** '<?' ProcInstr ::= Name [S ({Char} - ({Char}'>?' {Char})]'?>'
+ *
+ * see [15]
+ */
+ def xProcInstr: ElementType = {
+ val n = xName
+ xSpaceOpt
+ xTakeUntil(mkProcInstr(_, n, _), () => tmppos, "?>")
+ }
+
+ /** attribute value, terminated by either ' or ". value may not contain <.
+ * @param endCh either ' or "
+ */
+ def xAttributeValue(endCh: Char): String = {
+ val buf = new StringBuilder
+ while (ch != endCh) {
+ // well-formedness constraint
+ if (ch == '<') return errorAndResult("'<' not allowed in attrib value", "")
+ else if (ch == SU) truncatedError("")
+ else buf append ch_returning_nextch
+ }
+ ch_returning_nextch
+ // @todo: normalize attribute value
+ buf.toString
+ }
+
+ def xAttributeValue(): String = {
+ val str = xAttributeValue(ch_returning_nextch)
+ // well-formedness constraint
+ normalizeAttributeValue(str)
+ }
+
+ private def takeUntilChar(it: Iterator[Char], end: Char): String = {
+ val buf = new StringBuilder
+ while (it.hasNext) it.next match {
+ case `end` => return buf.toString
+ case ch => buf append ch
+ }
+ error("Expected '%s'".format(end))
+ }
+
+ /** [42] '<' xmlEndTag ::= '<' '/' Name S? '>'
+ */
+ def xEndTag(startName: String) {
+ xToken('/')
+ if (xName != startName)
+ errorNoEnd(startName)
+
+ xSpaceOpt
+ xToken('>')
+ }
+
+ /** actually, Name ::= (Letter | '_' | ':') (NameChar)* but starting with ':' cannot happen
+ * Name ::= (Letter | '_') (NameChar)*
+ *
+ * see [5] of XML 1.0 specification
+ *
+ * pre-condition: ch != ':' // assured by definition of XMLSTART token
+ * post-condition: name does neither start, nor end in ':'
+ */
+ def xName: String = {
+ if (ch == SU)
+ truncatedError("")
+ else if (!isNameStart(ch))
+ return errorAndResult("name expected, but char '%s' cannot start a name" format ch, "")
+
+ val buf = new StringBuilder
+
+ do buf append ch_returning_nextch
+ while (isNameChar(ch))
+
+ if (buf.last == ':') {
+ reportSyntaxError( "name cannot end in ':'" )
+ buf.toString dropRight 1
+ }
+ else buf.toString
+ }
+
+ private def attr_unescape(s: String) = s match {
+ case "lt" => "<"
+ case "gt" => ">"
+ case "amp" => "&"
+ case "apos" => "'"
+ case "quot" => "\""
+ case "quote" => "\""
+ case _ => "&" + s + ";"
+ }
+
+ /** Replaces only character references right now.
+ * see spec 3.3.3
+ */
+ private def normalizeAttributeValue(attval: String): String = {
+ val buf = new StringBuilder
+ val it = attval.iterator.buffered
+
+ while (it.hasNext) buf append (it.next match {
+ case ' ' | '\t' | '\n' | '\r' => " "
+ case '&' if it.head == '#' => it.next ; xCharRef(it)
+ case '&' => attr_unescape(takeUntilChar(it, ';'))
+ case c => c
+ })
+
+ buf.toString
+ }
+
+ /** CharRef ::= "&#" '0'..'9' {'0'..'9'} ";"
+ * | "&#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";"
+ *
+ * see [66]
+ */
+ def xCharRef(ch: () => Char, nextch: () => Unit): String =
+ Utility.parseCharRef(ch, nextch, reportSyntaxError _)
+
+ def xCharRef(it: Iterator[Char]): String = {
+ var c = it.next
+ Utility.parseCharRef(() => c, () => { c = it.next }, reportSyntaxError _)
+ }
+
+ def xCharRef: String = xCharRef(() => ch, () => nextch)
/** Create a lookahead reader which does not influence the input */
def lookahead(): BufferedIterator[Char]
+ /** The library and compiler parsers had the interesting distinction of
+ * different behavior for nextch (a function for which there are a total
+ * of two plausible behaviors, so we know the design space was fully
+   *  explored). One of them returned the value of nextch before the increment
+ * and one of them the new value. So to unify code we have to at least
+ * temporarily abstract over the nextchs.
+ */
def ch: Char
def nextch: Char
+ def ch_returning_nextch: Char
+ def eof: Boolean
+
+ // def handle: HandleType
+ var tmppos: PositionType
+
def xHandleError(that: Char, msg: String): Unit
def reportSyntaxError(str: String): Unit
def reportSyntaxError(pos: Int, str: String): Unit
- def eof: Boolean
+
+ def truncatedError(msg: String): Nothing
+ def errorNoEnd(tag: String): Nothing
+
+ protected def errorAndResult[T](msg: String, x: T): T = {
+ reportSyntaxError(msg)
+ x
+ }
def xToken(that: Char) {
if (ch == that) nextch
@@ -53,9 +214,16 @@ private[scala] trait MarkupParserCommon extends TokenTests {
if (isSpace(ch)) { nextch; xSpaceOpt }
else xHandleError(ch, "whitespace expected")
- //
+ /** Apply a function and return the passed value */
def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
+ /** Execute body with a variable saved and restored after execution */
+ def saving[A,B](getter: A, setter: (A) => Unit)(body: => B): B = {
+ val saved = getter
+ try body
+ finally setter(saved)
+ }
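+  // Illustrative uses of the combinators above (not part of this patch):
+  //   returning(new StringBuilder)(_ append ch)                      // apply a side effect, keep the value
+  //   saving(tmppos, (p: PositionType) => tmppos = p)(xProcInstr)    // tmppos is restored afterwards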
+
/** Take characters from input stream until given String "until"
* is seen. Once seen, the accumulated characters are passed
* along with the current Position to the supplied handler function.
@@ -73,7 +241,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
if (ch == head && peek(rest))
return handler(positioner(), sb.toString)
else if (ch == SU)
- xHandleError(ch, "") // throws TruncatedXML in compiler
+ truncatedError("") // throws TruncatedXMLControl in compiler
sb append ch
nextch
diff --git a/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala b/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala
index 05d535155b..083465bc41 100644
--- a/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala
+++ b/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala
@@ -12,10 +12,6 @@ package scala.xml
package parsing
import factory.NodeFactory
-import collection.Seq
-import collection.immutable.List
-import org.xml.sax.InputSource
-import javax.xml.parsers.{ SAXParser, SAXParserFactory }
/** nobinding adaptor providing callbacks to parser to create elements.
* implements hash-consing
diff --git a/src/library/scala/xml/parsing/TokenTests.scala b/src/library/scala/xml/parsing/TokenTests.scala
index e41cff20a3..13500e8510 100644
--- a/src/library/scala/xml/parsing/TokenTests.scala
+++ b/src/library/scala/xml/parsing/TokenTests.scala
@@ -12,8 +12,6 @@
package scala.xml
package parsing
-import collection.Seq
-import collection.immutable.List
/**
* Helper functions for parsing XML fragments
*/
diff --git a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala b/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
index 06828b7320..00126c4881 100644
--- a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
+++ b/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
@@ -51,7 +51,7 @@ abstract class ValidatingMarkupHandler extends MarkupHandler with Logged {
log("advanceDFA(trans): " + trans)
trans.get(ContentModel.ElemName(label)) match {
case Some(qNew) => qCurrent = qNew
- case _ => reportValidationError(pos, "DTD says, wrong element, expected one of "+trans.keysIterator);
+ case _ => reportValidationError(pos, "DTD says, wrong element, expected one of "+trans.keys);
}
}
// advance in current automaton
diff --git a/src/library/scala/xml/parsing/XhtmlEntities.scala b/src/library/scala/xml/parsing/XhtmlEntities.scala
index dbc2ae0621..6e35aa9606 100644
--- a/src/library/scala/xml/parsing/XhtmlEntities.scala
+++ b/src/library/scala/xml/parsing/XhtmlEntities.scala
@@ -8,11 +8,10 @@
// $Id$
-
package scala.xml
package parsing
-import scala.xml.dtd.{IntDef, ParsedEntityDecl}
+import scala.xml.dtd.{ IntDef, ParsedEntityDecl }
/** <p>
* (c) David Pollak 2007 WorldWide Conferencing, LLC.
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java b/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java
index b136f9423e..0a9ef6929e 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java
@@ -55,7 +55,7 @@ public class Attribute {
return value;
}
- /**@return an array with the arguments to the attribute's contructor. */
+ /**@return an array with the arguments to the attribute's constructor. */
public Object[] getConstructorArguments() {
parseBlob();
Object[] cas = new Object[constrArgs.length];
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java b/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java
index 459bb39a20..457a1d8c2b 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java
@@ -854,10 +854,18 @@ public class PEFile {
while (getByte() == ELEMENT_TYPE_CMOD_OPT
|| getByte() == ELEMENT_TYPE_CMOD_REQD)
{
- Type t = decodeType();
- System.err.println("CMOD: " + t);
- if (getByte() == ELEMENT_TYPE_CMOD_REQD)
- throw new RuntimeException("Reqired CMOD: " + t);
+ // skip the tag 23.2.7
+ readByte();
+ // skip the TypeDefOrRefEncoded (23.2.8)
+ readByte();
+ readByte();
+
+ // @FIXME: could be 4 bytes, not always 2...
+
+ //Type t = decodeType();
+ //System.err.println("CMOD: " + t);
+ //if (getByte() == ELEMENT_TYPE_CMOD_REQD)
+ //throw new RuntimeException("Reqired CMOD: " + t);
}
}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java b/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java
index a6e7bb31b2..78c17038cb 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java
+++ b/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java
@@ -143,8 +143,17 @@ final class PEModule extends Module {
Assembly assem = getAssembly(name);
type = assem.GetType(typeName);
if (type == null) {
- throw new RuntimeException("Failed to locate type " +
- typeName + " in assembly " + assem);
+ // HACK: the IKVM.OpenJDK.Core assembly is compiled against mscorlib.dll v2.0
+ // The MSIL library cannot parse the v2.0 mscorlib because of generics, so we
+ // use the v1.0
+ // However, the java.io.FileDescriptor.FlushFileBuffers method uses a type
+ // Microsoft.Win32.SafeHandles.SafeFileHandle, which only exists in mscorlib
+ // v2.0
+            // For now, just return Object (fine as long as we don't use that method).
+ Assembly asmb = getAssembly("mscorlib");
+ type = asmb.GetType("System.Object");
+ //throw new RuntimeException("Failed to locate type " +
+ //typeName + " in assembly " + assem);
}
break;
case ModuleDef.ID:
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
index 4644cade72..0e0e337ceb 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
@@ -20,7 +20,7 @@ import ch.epfl.lamp.compiler.msil._
import ch.epfl.lamp.compiler.msil.util.Table
/**
- * The MSIL printer Vistor. It prints a complete
+ * The MSIL printer Visitor. It prints a complete
* assembly in a single or multiple files. Then this file can be compiled by ilasm.
*
* @author Nikolay Mihaylov
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala
index eb86c96de5..a1c5091547 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala
@@ -40,7 +40,7 @@ class MethodBuilder(name: String, declType: Type, attrs: Int, returnType: Type,
def GetILGenerator(): ILGenerator = {
if (ilGenerator == null)
throw new RuntimeException
- ("No code generator avaiable for this method: " + this)
+ ("No code generator available for this method: " + this)
return ilGenerator
}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala
index 63776cc084..b1b6d41eb5 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala
@@ -20,8 +20,8 @@ import ch.epfl.lamp.compiler.msil.emit
import ch.epfl.lamp.compiler.msil.util.Table
/**
- * The MSIL printer Vistor. It prints a complete
- * assembly into seperate files. Then these files can be compiled by ilasm.
+ * The MSIL printer Visitor. It prints a complete
+ * assembly into separate files. Then these files can be compiled by ilasm.
*
* @author Nikolay Mihaylov
* @author Daniel Lorch
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala
index 1bd8e48633..835bdcadd0 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala
@@ -1139,7 +1139,7 @@ object OpCode {
opcode(Rem_Un, CEE_REM_UN, "rem.un" , 0xFFFFFF5E, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
/**
- * Computes the bitwise AND of two values and pushes the result onto the evalution stack.
+ * Computes the bitwise AND of two values and pushes the result onto the evaluation stack.
*/
final val And = new OpCode()
opcode(And, CEE_AND, "and" , 0xFFFFFF5F, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
@@ -1585,7 +1585,7 @@ object OpCode {
opcode(Conv_Ovf_I2, CEE_CONV_OVF_I2, "conv.ovf.i2", 0xFFFFFFB5, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
/**
- * Converts the signed value on top of the sevaluation tack to signed int32,
+ * Converts the signed value on top of the evaluation stack to signed int32,
* throwing OverflowException on overflow.
*/
final val Conv_Ovf_I4 = new OpCode()
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala
index f8e0f140a8..557b022f54 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala
@@ -521,7 +521,7 @@ object OpCodes {
final val Rem_Un = OpCode.Rem_Un
/**
- * Computes the bitwise AND of two values and pushes the result onto the evalution stack.
+ * Computes the bitwise AND of two values and pushes the result onto the evaluation stack.
*/
final val And = OpCode.And
@@ -899,7 +899,7 @@ object OpCodes {
final val Conv_Ovf_I2 = OpCode.Conv_Ovf_I2
/**
- * Converts the signed value on top of the sevaluation tack to signed int32,
+ * Converts the signed value on top of the evaluation stack to signed int32,
* throwing OverflowException on overflow.
*/
final val Conv_Ovf_I4 = OpCode.Conv_Ovf_I4
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala
index 5ebc5ea32f..7a880f2ac6 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala
@@ -19,7 +19,7 @@ import ch.epfl.lamp.compiler.msil.emit
import ch.epfl.lamp.compiler.msil.util.Table
/**
- * The MSIL printer Vistor. It prints a complete
+ * The MSIL printer Visitor. It prints a complete
* assembly in a single file. Then this file can be compiled by ilasm.
*
* @author Nikolay Mihaylov
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala
index 84fd2a4023..2c0ab29b90 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala
@@ -141,14 +141,19 @@ class TypeBuilder (module: Module, attributes: Int, fullName: String, baseType:
/** Searches for the nested type with the specified name. */
override def GetNestedType(name: String): Type = {
- testRaw(name)
- return super.GetNestedType(name)
+ testRaw(name)
+ super.GetNestedType(name)
}
/** Returns all the types nested within the current Type. */
override def GetNestedTypes(): Array[Type] = {
- testRaw("<GetNestedTypes>")
- return super.GetNestedTypes()
+ testRaw("<GetNestedTypes>")
+ super.GetNestedTypes()
+ }
+
+ /** Returns a Type object that represents a one-dimensional array of the current type */
+ def MakeArrayType(): Type = {
+ Type.mkArray(this, 1)
}
/** Sets a custom attribute. */
@@ -184,8 +189,7 @@ class TypeBuilder (module: Module, attributes: Int, fullName: String, baseType:
// i.e. not finalized by call to CreateType
protected def testRaw(member: String) {
if (raw)
- throw new RuntimeException
- ("Not supported for TypeBuilder before CreateType(): " +
+ throw new RuntimeException("Not supported for TypeBuilder before CreateType(): " +
FullName + "::" + member)
}
diff --git a/src/partest/README b/src/partest/README
index 430a2987f8..c7673fe2f8 100644
--- a/src/partest/README
+++ b/src/partest/README
@@ -1,31 +1,50 @@
-How partest choses the compiler / library:
+If you're looking for something to read, I suggest running ../test/partest
+with no arguments, which at this moment prints this:
- * ''-Dscalatest.build=build/four-pack'' -> will search for libraries in
- ''lib'' directory of given path
- * ''--pack'' -> will set ''scalatest.build=build/pack'', and run all tests.
- add ''--[kind]'' to run a selected set of tests.
- * auto detection:
- - scalatest.build property -> ''bin'' / ''lib'' directories
- - distribution (''dists/latest'')
- - supersabbus pack (''build/pack'')
- - sabbus quick (''build/quick'')
- - installed dist (test files in ''misc/scala-test/files'')
+Usage: partest [<options>] [<test> <test> ...]
+ <test>: a path to a test designator, typically a .scala file or a directory.
+ Examples: files/pos/test1.scala, files/res/bug785
-How partest choses test files: the test files must be accessible from
-the directory on which partest is run. So the test files must be either
-at:
- * ./test/files
- * ./files (cwd is "test")
- * ./misc/scala-test/files (installed scala distribution)
+ Test categories:
+ --all run all tests (default, unless no options given)
+ --pos Compile files that are expected to build
+ --neg Compile files that are expected to fail
+ --run Test JVM backend
+ --jvm Test JVM backend
+ --res Run resident compiler scenarii
+ --buildmanager Run Build Manager scenarii
+ --scalacheck Run Scalacheck tests
+ --script Run script files
+ --shootout Run shootout tests
+ --scalap Run scalap tests
-Other arguments:
- * --pos next files test a compilation success
- * --neg next files test a compilation failure
- * --run next files test the interpreter and all backends
- * --jvm next files test the JVM backend
- * --res next files test the resident compiler
- * --shootout next files are shootout tests
- * --script next files test the script runner
- * ''-Dscalatest.scalac_opts=...'' -> add compiler options
- * ''--verbose'' -> print verbose messages
- * ''-Dpartest.debug=true'' -> print debug messages
+ Test "smart" categories:
+ --grep run all tests with a source file containing <expr>
+ --failed run all tests which failed on the last run
+
+ Specifying paths and additional flags, ~ means repository root:
+ --rootdir path from ~ to partest (default: test)
+ --builddir path from ~ to test build (default: build/pack)
+ --srcdir path from --rootdir to sources (default: files)
+ --javaopts flags to java on all runs (overrides JAVA_OPTS)
+ --scalacopts flags to scalac on all tests (overrides SCALAC_OPTS)
+ --pack alias for --builddir build/pack
+ --quick alias for --builddir build/quick
+
+ Options influencing output:
+ --trace show the individual steps taken by each test
+ --show-diff show diff between log and check file
+ --show-log show log on failures
+ --dry-run do not run tests, only show their traces.
+ --terse be less verbose (almost silent except for failures)
+ --verbose be more verbose (additive with --trace)
+ --debug maximum debugging output
+ --ansi print output in color
+
+ Other options:
+ --timeout Timeout in seconds
+ --cleanup delete all stale files and dirs before run
+ --nocleanup do not delete any logfiles or object dirs
+ --stats collect and print statistics about the tests
+ --validate examine test filesystem for inconsistencies
+ --version print version
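+
+  Example invocations (illustrative only):
+    ../test/partest --show-diff files/pos/test1.scala
+    ../test/partest --pos --terse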
diff --git a/src/partest/scala/tools/partest/Actions.scala b/src/partest/scala/tools/partest/Actions.scala
new file mode 100644
index 0000000000..3e745714cb
--- /dev/null
+++ b/src/partest/scala/tools/partest/Actions.scala
@@ -0,0 +1,168 @@
+/* __ *\
+** ________ ___ / / ___ Scala Parallel Testing **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.tools
+package partest
+
+import util._
+import nsc.io._
+
+trait Actions {
+ partest: Universe =>
+
+ class TestSequence(val actions: List[TestStep]) extends AbsTestSequence {
+ }
+
+ implicit def createSequence(xs: List[TestStep]) = new TestSequence(xs)
+
+ trait ExecSupport {
+ self: TestEntity =>
+
+ def execEnv: Map[String, String] = {
+ val map = assembleEnvironment()
+ val cwd = execCwd.toList map ("CWD" -> _.path)
+
+ map ++ cwd
+ }
+ def execCwd = if (commandFile.isFile) Some(sourcesDir) else None
+
+ def runExec(args: List[String]): Boolean = {
+ val cmd = fromArgs(args)
+
+ if (isVerbose) {
+ trace(execEnv.mkString("ENV(", "\n", "\n)"))
+ execCwd foreach (x => trace("CWD(" + x + ")"))
+ }
+
+ trace(cmd)
+ isDryRun || execAndLog(cmd)
+ }
+
+ /** Exec a process to run a command. Assumes 0 exit value is success.
+ * Of necessity, also treats no available exit value as success.
+ */
+ protected def execAndLog(cmd: String): Boolean = {
+ var proc: Process = null
+
+ val result = interruptMeIn(testTimeout) {
+ loggingResult {
+ proc = Process.exec(toArgs(cmd), execEnv, execCwd.orNull, true)
+ proc.slurp()
+ }
+ proc.waitFor() == 0
+ }
+ result getOrElse {
+ warning("Process never terminated: '%s'" format cmd)
+ if (proc != null)
+ proc.destroy()
+
+ false
+ }
+ }
+ }
+
+ trait ScriptableTest {
+ self: TestEntity =>
+
+ // def customTestStep(line: String): TestStep
+
+ /** Translates a line from a .cmds file into a teststep.
+ */
+ def customTestStep(line: String): TestStep = {
+ val (cmd, rest) = line span (x => !Character.isWhitespace(x))
+ val args = toArgs(rest)
+ def fail: TestStep = (_: TestEntity) => error("Parse error: did not understand '%s'" format line)
+
+ val f: TestEntity => Boolean = cmd match {
+ case "scalac" => _ scalac args
+ case "javac" => _ javac args
+ case "scala" => _ runScala args
+ case "diff" => if (args.size != 2) fail else _ => diffFiles(File(args(0)), File(args(1))) == ""
+ case _ => fail
+ }
+ f
+ }
+ }
+
+ trait CompilableTest extends CompileExecSupport {
+ self: TestEntity =>
+
+ def sourceFiles = location.walk collect { case f: File if isJavaOrScala(f) => f } toList
+ def allSources = sourceFiles map (_.path)
+ def scalaSources = sourceFiles filter isScala map (_.path)
+ def javaSources = sourceFiles filter isJava map (_.path)
+
+ /** If there are mixed java and scala files, the standard compilation
+ * sequence is:
+ *
+ * scalac with all files
+ * javac with only java files
+ * scalac with only scala files
+ *
+ * This should be expanded to encompass other strategies so we know how
+ * well they're working or not working - notably, it would be very useful
+ * to know exactly when and how two-pass compilation fails.
+ */
+ def compile() = {
+ def compileJava() = javac(javaSources)
+ def compileScala() = scalac(scalaSources)
+ def compileAll() = scalac(allSources)
+ def compileMixed() = compileAll() && compileJava() && compileScala()
+
+ if (scalaSources.nonEmpty && javaSources.nonEmpty) compileMixed()
+ else compileScala()
+ }
+ }
+
+ trait DiffableTest {
+ self: TestEntity =>
+
+ def checkFile: File = withExtension("check").toFile
+
+ def normalizePaths(s: String) = {
+      /** This accommodates slash/backslash issues by noticing when a given
+ * line was altered, which means it held a path, and only then converting any
+ * backslashes to slashes. It's not foolproof but it's as close as we
+ * can get in one line.
+ */
+ val s2 = s.replaceAll("""(?m)\Q%s\E""" format (sourcesDir + File.separator), "")
+ if (s != s2) s2.replaceAll("""\\""", "/") else s2
+ }
+
+ /** The default cleanup normalizes paths relative to sourcesDir.
+ */
+ def diffCleanup(f: File) = safeLines(f) map normalizePaths mkString "\n"
+
+    /** A missing check file is considered a successful diff.
+     *  Necessary since many categories use
+ * checkfiles in an ad hoc manner.
+ */
+ def runDiff(check: File, log: File) = {
+ def arg1 = tracePath(check)
+ def arg2 = tracePath(log)
+ def noCheck = !check.exists && returning(true)(_ => trace("diff %s %s [unchecked]".format(arg1, arg2)))
+
+ noCheck || {
+ def result = safeSlurp(check).trim == diffCleanup(log).trim
+ def msg = if (result) "passed" else "failed"
+
+ if (isDryRun) {
+ trace("diff %s %s".format(arg1, arg2))
+ true
+ }
+ else {
+ trace("diff %s %s [%s]".format(arg1, arg2, msg))
+ result
+ }
+ }
+ }
+
+ private def cleanedLog = returning(File makeTemp "partest-diff")(_ writeAll diffCleanup(logFile))
+ def diffOutput(): String = checkFile ifFile (f => diffFiles(f, cleanedLog)) getOrElse ""
+ }
+}
diff --git a/src/partest/scala/tools/partest/Alarms.scala b/src/partest/scala/tools/partest/Alarms.scala
new file mode 100644
index 0000000000..72afc232e5
--- /dev/null
+++ b/src/partest/scala/tools/partest/Alarms.scala
@@ -0,0 +1,85 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package partest
+
+import java.util.{ Timer, TimerTask }
+
+trait Alarms {
+ self: Universe =>
+
+ def interruptMeIn[T](seconds: Int)(body: => T): Option[T] = {
+ val thisThread = currentThread
+ val alarm = new SimpleAlarm(seconds * 1000) set thisThread.interrupt()
+
+ try { Some(body) }
+ catch { case _: InterruptedException => None }
+ finally { alarm.cancel() ; Thread.interrupted() }
+ }
+
+ case class AlarmerAction(secs: Int, action: () => Unit) extends Runnable {
+ override def run() = action()
+ }
+
+  /** Set any number of alarms, each an AlarmerAction pairing the
+   *  seconds to wait with the action to execute.
+ */
+ class Alarmer(alarms: AlarmerAction*) {
+ import java.util.concurrent._
+
+ val exec = Executors.newSingleThreadScheduledExecutor()
+ alarms foreach (x => exec.schedule(x, x.secs, TimeUnit.SECONDS))
+ exec.shutdown()
+
+ def cancelAll() = exec.shutdownNow()
+ }
+
+ class SimpleAlarm(timeout: Long) {
+ private val alarm = new Timer
+
+ /** Start a timer, running the given body if it goes off.
+ */
+ def set(body: => Unit) = returning(new TimerTask { def run() = body })(alarm.schedule(_, timeout))
+
+ /** Cancel the timer.
+ */
+ def cancel() = alarm.cancel()
+ }
+
+ trait TestAlarms {
+ test: TestEntity =>
+
+ private def warning1 = AlarmerAction(testWarning, () => warning(
+ """|I've been waiting %s seconds for this to complete:
+ | %s
+ |It may be stuck, or if not, it should be broken into smaller tests.
+ |""".stripMargin.format(testWarning, test))
+ )
+ private def warning2 = AlarmerAction(testWarning * 2, () => warning(
+ """|Now I've been waiting %s seconds for this to complete:
+ | %s
+ |If partest seems hung it would be a good place to look.
+ |""".stripMargin.format(testWarning * 2, test))
+ )
+
+ def startAlarms(onTimeout: => Unit) =
+ if (isNoAlarms) new Alarmer() // for alarm debugging
+ else new Alarmer(Seq(warning1, warning2, AlarmerAction(testTimeout, () => onTimeout)): _*)
+ }
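+  // Illustrative pairing (cf. Dispatcher): a runner starts the alarms and cancels
+  // them once the test finishes; runTheTest() below is hypothetical.
+  //   val alarmer = test startAlarms (parent ! new Timeout(test))
+  //   try runTheTest() finally alarmer.cancelAll()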
+
+ // Thread.setDefaultUncaughtExceptionHandler(new UncaughtException)
+ // class UncaughtException extends Thread.UncaughtExceptionHandler {
+ // def uncaughtException(t: Thread, e: Throwable) {
+ // Console.println("Uncaught in %s: %s".format(t, e))
+ // }
+ // }
+ //
+ // lazy val logger = File("/tmp/partest.log").bufferedWriter()
+ // def flog(msg: String) = logger synchronized {
+ // logger write (msg + "\n")
+ // logger.flush()
+ // }
+}
diff --git a/src/partest/scala/tools/partest/BuildContributors.scala b/src/partest/scala/tools/partest/BuildContributors.scala
new file mode 100644
index 0000000000..64c7e07bc3
--- /dev/null
+++ b/src/partest/scala/tools/partest/BuildContributors.scala
@@ -0,0 +1,102 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+
+import nsc.io._
+import nsc.util.ClassPath
+
+trait BuildContributors {
+ universe: Universe =>
+
+ /** A trait mixed into types which contribute a portion of the values.
+   *  The basic mechanism is that TestBuild, TestCategory, and TestEntity
+   *  can each contribute to each value.  They are assembled at the last
+   *  moment by the ContributorAssembler (presently the TestEntity).
+ */
+ trait BuildContributor {
+ def javaFlags: List[String]
+ def scalacFlags: List[String]
+ def classpathPaths: List[Path]
+ def buildProperties: List[(String, Any)]
+ def buildEnvironment: Map[String, String]
+ }
+
+ trait ContributorAssembler {
+ def contributors: List[BuildContributor]
+ def assemble[T](what: BuildContributor => List[T]): List[T] = contributors flatMap what
+
+ /** !!! This will need work if we want to achieve real composability,
+ * but it can wait for the demand.
+ */
+ def assembleScalacArgs(args: List[String]) = assemble(_.scalacFlags) ++ args
+ def assembleJavaArgs(args: List[String]) = assemble(_.javaFlags) ++ args
+ def assembleProperties() = assemble(_.buildProperties)
+ def assembleClasspaths(paths: List[Path]) = assemble(_.classpathPaths) ++ paths
+ def assembleEnvironment() = assemble(_.buildEnvironment.toList).toMap
+
+ def createClasspathString() = ClassPath fromPaths (assembleClasspaths(Nil) : _*)
+ def createPropertyString() = assembleProperties() map { case (k, v) => "-D%s=%s".format(k, v.toString) }
+ }
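+  // For example (illustrative; the flag is arbitrary): for a TestEntity,
+  // assembleScalacArgs(List("-unchecked")) concatenates the build's SCALAC_OPTS,
+  // the category's flags, the test's .flags file, and finally "-unchecked".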
+
+ trait BuildContribution extends BuildContributor {
+ self: TestBuild =>
+
+ /** The base classpath and system properties.
+ * !!! TODO - this should adjust itself depending on the build
+ * being tested, because pack and quick at least need different jars.
+ */
+ def classpathPaths = List[Path](library, compiler, partest, fjbg) ++ forkJoinPath
+ def buildProperties = List(
+ "scala.home" -> testBuildDir,
+ "partest.lib" -> library, // used in jvm/inner
+ "java.awt.headless" -> true,
+ "user.language" -> "en",
+ "user.country" -> "US",
+ "partest.debug" -> isDebug,
+ "partest.verbose" -> isVerbose
+      // Disabled because there are no native tests.
+ // "java.library.path" -> srcLibDir
+ )
+ def javaFlags: List[String] = toArgs(javaOpts)
+ def scalacFlags: List[String] = toArgs(scalacOpts)
+
+    /** We put the /bin directory of the build under test at the front of the
+     *  PATH so that scripts which execute "scala" will use this
+     *  build and not whatever happens to be on the user's path.
+ */
+ private def modifiedPath = ClassPath.join(scalaBin.path, Properties.envOrElse("PATH", ""))
+ def buildEnvironment = Map("PATH" -> modifiedPath)
+ }
+
+ trait CategoryContribution extends BuildContributor {
+ self: DirBasedCategory =>
+
+ /** Category-wide classpath additions placed in <category>/lib. */
+ private def libContents = root / "lib" ifDirectory (_.list.toList)
+
+ def classpathPaths = libContents getOrElse Nil
+ def buildProperties = Nil
+ def javaFlags = Nil
+ def scalacFlags = Nil
+ def buildEnvironment = Map()
+ }
+
+ trait TestContribution extends BuildContributor with ContributorAssembler {
+ self: TestEntity =>
+
+ def jarsInTestDir = location.walk collect { case f: File if f hasExtension "jar" => f } toList
+
+ def contributors = List(build, category, self)
+ def javaFlags = safeArgs(javaOptsFile)
+ def scalacFlags = safeArgs(scalaOptsFile)
+ def classpathPaths = jarsInTestDir :+ outDir
+ def buildProperties = List(
+ "partest.output" -> outDir.toAbsolute, // used in jvm/inner
+ "partest.cwd" -> outDir.parent.toAbsolute // used in shootout tests
+ )
+ def buildEnvironment = Map("JAVA_OPTS" -> fromArgs(assembleJavaArgs(Nil)))
+ }
+} \ No newline at end of file
diff --git a/src/partest/scala/tools/partest/Categories.scala b/src/partest/scala/tools/partest/Categories.scala
new file mode 100644
index 0000000000..1d5a21153f
--- /dev/null
+++ b/src/partest/scala/tools/partest/Categories.scala
@@ -0,0 +1,69 @@
+/* __ *\
+** ________ ___ / / ___ Scala Parallel Testing **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.tools
+package partest
+
+import nsc.Settings
+import nsc.io._
+import nsc.util.{ ClassPath }
+
+trait Categories {
+ self: Universe =>
+
+ trait TestCategory extends AbsTestCategory {
+ def kind: String
+ def startMessage: String = "Executing test group"
+ def testSequence: TestSequence
+
+ class TestSettings(entity: TestEntity, error: String => Unit) extends Settings(error) {
+ def this(entity: TestEntity) = this(entity, Console println _)
+
+ deprecation.value = false
+ encoding.value = "ISO-8859-1"
+ classpath.value = entity.testClasspath
+ outdir.value = entity.outDir.path
+ }
+
+ def createSettings(entity: TestEntity): TestSettings = new TestSettings(entity)
+ def createTest(location: Path): TestEntity =
+ if (location.isFile) TestFile(this, location.toFile)
+ else if (location.isDirectory) TestDirectory(this, location.toDirectory)
+ else error("Failed to create test at '%s'" format location)
+
+ /** Category test identification.
+ */
+ def denotesTestFile(p: Path) = p.isFile && (p hasExtension "scala")
+ def denotesTestDir(p: Path) = p.isDirectory && !ignorePath(p)
+ def denotesTest(p: Path) = denotesTestDir(p) || denotesTestFile(p)
+
+ /** This should verify that all necessary files are present.
+ * By default it delegates to denotesTest.
+ */
+ def denotesValidTest(p: Path) = denotesTest(p)
+ }
+
+ abstract class DirBasedCategory(val kind: String) extends TestCategory with CategoryContribution {
+ lazy val root = Directory(src / kind).normalize
+ def enumerate = root.list filter denotesTest map createTest toList
+
+ /** Standard actions. These can be overridden either on the
+ * Category level or by individual tests.
+ */
+ def compile: TestStep = (_: TestEntity).compile()
+ def diff: TestStep = (_: TestEntity).diff()
+ def run: TestStep = (_: TestEntity).run()
+ def exec: TestStep = (_: TestEntity).exec()
+
+ /** Combinators.
+ */
+ def not(f: TestStep): TestStep = !f(_: TestEntity)
+
+ override def toString = kind
+ }
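+  // Illustrative sketch of a concrete category; the body here is hypothetical,
+  // not taken from this patch:
+  //   object Pos extends DirBasedCategory("pos") {
+  //     val testSequence: TestSequence = List(compile, diff)
+  //   }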
+} \ No newline at end of file
diff --git a/src/partest/scala/tools/partest/Compilable.scala b/src/partest/scala/tools/partest/Compilable.scala
new file mode 100644
index 0000000000..a1d987ad6d
--- /dev/null
+++ b/src/partest/scala/tools/partest/Compilable.scala
@@ -0,0 +1,103 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+
+import scala.tools.nsc.io._
+import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError }
+import scala.tools.nsc.util.{ ClassPath }
+import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
+
+trait PartestCompilation {
+ self: Universe =>
+
+ trait CompileExecSupport extends ExecSupport {
+ self: TestEntity =>
+
+ def javacpArg = "-classpath " + testClasspath
+ def scalacpArg = "-usejavacp"
+
+ /** Not used, requires tools.jar.
+ */
+ // def javacInternal(args: List[String]) = {
+ // import com.sun.tools.javac.Main
+ // Main.compile(args.toArray, logWriter)
+ // }
+
+ def javac(args: List[String]): Boolean = {
+ // javac -d outdir -classpath <basepath> <files>
+ val cmd = "%s -d %s %s %s".format(javacCmd, outDir, javacpArg, fromArgs(args))
+ def traceMsg =
+ if (isVerbose) cmd
+ else "%s -d %s %s".format(tracePath(Path(javacCmd)), tracePath(outDir), fromArgs(args))
+
+ trace(traceMsg)
+
+ isDryRun || execAndLog(cmd)
+ }
+
+ def scalac(args: List[String]): Boolean = {
+ val allArgs = assembleScalacArgs(args)
+ val (global, files) = newGlobal(allArgs)
+ val foundFiles = execCwd match {
+ case Some(cwd) => files map (x => File(cwd / x))
+ case _ => files map (x => File(x))
+ }
+ def nonFileArgs = if (isVerbose) global.settings.recreateArgs else assembleScalacArgs(Nil)
+ def traceArgs = fromArgs(nonFileArgs ++ (foundFiles map tracePath))
+ def traceMsg =
+ if (isVerbose) "%s %s".format(build.scalaBin / "scalac", traceArgs)
+ else "scalac " + traceArgs
+
+ trace(traceMsg)
+ isDryRun || global.partestCompile(foundFiles map (_.path), true)
+ }
+
+ /** Actually running the test, post compilation.
+ * Normally args will be List("Test", "jvm"), main class and arg to it.
+ */
+ def runScala(args: List[String]): Boolean = {
+ val scalaRunnerClass = "scala.tools.nsc.MainGenericRunner"
+
+ // java $JAVA_OPTS <javaopts> -classpath <cp>
+ val javaCmdAndOptions = javaCmd +: assembleJavaArgs(List(javacpArg))
+ // MainGenericRunner -usejavacp <scalacopts> Test jvm
+ val scalaCmdAndOptions = List(scalaRunnerClass, scalacpArg) ++ assembleScalacArgs(args)
+ // Assembled
+ val cmd = fromArgs(javaCmdAndOptions ++ createPropertyString() ++ scalaCmdAndOptions)
+
+ def traceMsg = if (isVerbose) cmd else fromArgs(javaCmd :: args)
+ trace(traceMsg)
+
+ isDryRun || execAndLog(cmd)
+ }
+
+ def newReporter(settings: Settings) = new ConsoleReporter(settings, Console.in, logWriter)
+
+ class PartestGlobal(settings: Settings, val creporter: ConsoleReporter) extends Global(settings, creporter) {
+ def partestCompile(files: List[String], printSummary: Boolean): Boolean = {
+ try { new Run compile files }
+ catch { case FatalError(msg) => creporter.error(null, "fatal error: " + msg) }
+
+ if (printSummary)
+ creporter.printSummary
+
+ creporter.flush()
+ !creporter.hasErrors
+ }
+ }
+
+ def newGlobal(args: List[String]): (PartestGlobal, List[String]) = {
+ val settings = category createSettings self
+ val command = new CompilerCommand(args, settings)
+ val reporter = newReporter(settings)
+
+ if (!command.ok)
+ debug("Error parsing arguments: '%s'".format(args mkString ", "))
+
+ (new PartestGlobal(command.settings, reporter), command.files)
+ }
+ }
+} \ No newline at end of file
diff --git a/src/partest/scala/tools/partest/Config.scala b/src/partest/scala/tools/partest/Config.scala
new file mode 100644
index 0000000000..7d8bb80835
--- /dev/null
+++ b/src/partest/scala/tools/partest/Config.scala
@@ -0,0 +1,130 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+
+import io._
+import nsc.io._
+import Properties._
+
+trait Config {
+ universe: Universe =>
+
+ lazy val src = absolutize(srcDir).toDirectory
+ lazy val build = new TestBuild()
+
+ def javaHomeEnv = envOrElse("JAVA_HOME", null)
+ def javaCmd = envOrElse("JAVACMD", "java")
+ def javacCmd = Option(javaHomeEnv) map (x => Path(x) / "bin" / "javac" path) getOrElse "javac"
+
+ /** Values related to actors. The timeouts are in seconds. On a dry
+ * run we only allocate one worker so the output isn't interspersed.
+ */
+ def workerTimeout = 3600 // 1 hour, probably overly generous
+ def testTimeout = testTimeout_ flatMap safeToInt getOrElse 900 // test timeout
+ def testWarning = testWarning_ flatMap safeToInt getOrElse (testTimeout / 10) // test warning
+ def numWorkers = if (isDryRun) 1 else propOrElse("partest.actors", "8").toInt
+ def expectedErrors = propOrElse("partest.errors", "0").toInt
+ def poolSize = (wrapAccessControl(propOrNone("actors.corePoolSize")) getOrElse "16").toInt
+
+ def allScalaFiles = src.deepFiles filter (_ hasExtension "scala")
+ def allObjDirs = src.deepDirs filter (_ hasExtension "obj")
+ def allLogFiles = src.deepFiles filter (_ hasExtension "log")
+ def allClassFiles = src.deepFiles filter (_ hasExtension "class")
+
+ class TestBuild() extends BuildContribution {
+ import nsc.util.ClassPath
+
+ /** Scala core libs.
+ */
+ val library = pathForComponent("library")
+ val compiler = pathForComponent("compiler")
+ val partest = pathForComponent("partest")
+ val scalap = pathForComponent("scalap", "%s.jar")
+
+ /** Scala supplementary libs - these are not all needed for all build targets,
+     *  and some of them are copied inside other jars in later targets. However,
+     *  quick, for instance, cannot be run without some of these.
+ */
+ val fjbg = pathForLibrary("fjbg")
+ val msil = pathForLibrary("msil")
+ val forkjoin = pathForLibrary("forkjoin")
+ val scalacheck = pathForLibrary("scalacheck")
+
+ /** Other interesting paths.
+ */
+ val scalaBin = testBuildDir / "bin"
+
+ /** A hack for now to get quick running.
+ */
+ def needsForkJoin = {
+ val loader = nsc.util.ScalaClassLoader.fromURLs(List(library.toURL))
+ val fjMarker = "scala.concurrent.forkjoin.ForkJoinTask"
+ val clazz = loader.tryToLoadClass(fjMarker)
+
+ if (clazz.isDefined) debug("Loaded ForkJoinTask OK, don't need jar.")
+ else debug("Could not load ForkJoinTask, putting jar on classpath.")
+
+ clazz.isEmpty
+ }
+ lazy val forkJoinPath: List[Path] = if (needsForkJoin) List(forkjoin) else Nil
+
+ /** Internal **/
+ private def repo = partestDir.parent.normalize
+ // XXX - is this needed? Where?
+ //
+ // private val pluginOptionString = "-Xplugin:"
+ // private def updatedPluginPath(options: String): String = {
+ // val (pluginArgs, rest) = toArgs(options) partition (_ startsWith pluginOptionString)
+ // // join all plugin paths as one classpath
+ // val pluginPaths = ClassPath.join(pluginArgs map (_ stripPrefix pluginOptionString): _*)
+ // // map all paths to absolute
+ // val newPath = ClassPath.map(pluginPaths, x => absolutize(x).path)
+ // // recreate option
+ // val pluginOption = if (newPath == "") None else Some(pluginOptionString + newPath)
+ //
+ // fromArgs(rest ::: pluginOption.toList)
+ // }
+
+ private def pathForComponent(what: String, jarFormat: String = "scala-%s.jar"): Path = {
+ def asDir = testBuildDir / "classes" / what
+ def asJar = testBuildDir / "lib" / jarFormat.format(what)
+
+ if (asDir.isDirectory) asDir
+ else if (asJar.isFile) asJar
+ else ""
+ }
+ private def pathForLibrary(what: String) = File(repo / "lib" / (what + ".jar"))
+ }
+
+ def printConfigBanner() = {
+ debug("Java VM started with arguments: '%s'" format fromArgs(Process.javaVmArguments))
+ debug("System Properties:\n" + util.allPropertiesString())
+
+ normal(configBanner())
+ }
+
+ /** Treat an access control failure as None. */
+ private def wrapAccessControl[T](body: => Option[T]): Option[T] =
+ try body catch { case _: java.security.AccessControlException => None }
+
+ private def configBanner() = {
+ val javaBin = Path(javaHome) / "bin"
+ val javaInfoString = "%s (build %s, %s)".format(javaVmName, javaVmVersion, javaVmInfo)
+
+ List(
+ "Scala compiler classes in: " + testBuildDir,
+ "Scala version is: " + nsc.Properties.versionMsg,
+ "Scalac options are: " + universe.scalacOpts,
+ "Java binaries in: " + javaBin,
+ "Java runtime is: " + javaInfoString,
+ "Java runtime options: " + (Process.javaVmArguments mkString " "),
+ "Java options are: " + universe.javaOpts,
+ "Source directory is: " + src,
+ "Selected categories: " + (selectedCategories mkString " "),
+ ""
+ ) mkString "\n"
+ }
+}
diff --git a/src/partest/scala/tools/partest/Dispatcher.scala b/src/partest/scala/tools/partest/Dispatcher.scala
new file mode 100644
index 0000000000..2c7d9d6a2f
--- /dev/null
+++ b/src/partest/scala/tools/partest/Dispatcher.scala
@@ -0,0 +1,161 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Philipp Haller
+ */
+
+package scala.tools
+package partest
+
+import scala.tools.nsc.io._
+import scala.actors.{ Actor, TIMEOUT }
+import scala.actors.Actor._
+import scala.collection.immutable
+import scala.util.control.Exception.ultimately
+
+/** The machinery for concurrent execution of tests. Each Worker
+ * is given a bundle of tests, which it runs sequentially and then
+ * sends a report back to the dispatcher.
+ */
+trait Dispatcher {
+ partest: Universe =>
+
+ /** The public entry point. The given filter narrows down the list of
+ * tests to run.
+ */
+ def runSelection(categories: List[TestCategory], filt: TestEntity => Boolean = _ => true): CombinedTestResults = {
+ // Setting scala.home informs tests where to obtain their jars.
+ setProp("scala.home", testBuildDir.path)
+
+ val allTests = allCategories flatMap (_.enumerate)
+ val selected = allTests filter filt
+ val groups = selected groupBy (_.category)
+ val count = selected.size
+
+ if (count == 0) return CombinedTestResults(0, 0, 0)
+ else if (count == allTests.size) verbose("Running all %d tests." format count)
+ else verbose("Running %d/%d tests: %s".format(count, allTests.size, toStringTrunc(selected map (_.label) mkString ", ")))
+
+ allCategories collect { case x if groups contains x => runCategory(x, groups(x)) } reduceLeft (_ ++ _)
+ }
+
+ private def parallelizeTests(tests: List[TestEntity]): immutable.Map[TestEntity, Int] = {
+ // propagate verbosity
+ if (isDebug) scala.actors.Debug.level = 3
+
+ // "If elected, I guarantee a slice of tests for every worker!"
+ val groups = tests grouped ((tests.size / numWorkers) + 1) toList
+
+ // "Workers, line up for assignments!"
+ val workers =
+ for ((slice, workerNum) <- groups.zipWithIndex) yield {
+ returning(new Worker(workerNum)) { worker =>
+ worker.start()
+ worker ! TestsToRun(slice)
+ }
+ }
+
+ normal("Started %d workers with ~%d tests each.\n".format(groups.size, groups.head.size))
+
+ /** Listening for news from the proletariat.
+ */
+ (workers map { w =>
+ receiveWithin(workerTimeout * 1000) {
+ case ResultsOfRun(resultMap) => resultMap
+ case TIMEOUT =>
+ warning("Worker %d timed out." format w.workerNum)
+ immutable.Map[TestEntity, Int]()
+ // mark all the worker's tests as having timed out - should be hard to miss
+ groups(w.workerNum) map (_ -> 2) toMap
+ }
+ }) reduceLeft (_ ++ _)
+ }
+
+ private def runCategory(category: TestCategory, tests: List[TestEntity]): CombinedTestResults = {
+ val kind = category.kind
+ normal("%s (%s tests in %s)\n".format(category.startMessage, tests.size, category))
+
+ val (milliSeconds, resultMap) = timed2(parallelizeTests(tests))
+ val (passed, failed) = resultsToStatistics(resultMap)
+
+ CombinedTestResults(passed, failed, milliSeconds)
+ }
+
+ /** A Worker is given a bundle of tests and runs them all sequentially.
+ */
+ class Worker(val workerNum: Int) extends Actor {
+ def act() {
+ react { case TestsToRun(tests) =>
+ val master = sender
+ runTests(tests)(results => master ! ResultsOfRun(results))
+ }
+ }
+
+ /** Runs the tests. Passes the result Map to onCompletion when done.
+ */
+ private def runTests(tests: List[TestEntity])(onCompletion: immutable.Map[TestEntity, Int] => Unit) {
+ var results = new immutable.HashMap[TestEntity, Int] // maps tests to results
+ val numberOfTests = tests.size
+ val testIterator = tests.iterator
+ def processed = results.size
+ def isComplete = testIterator.isEmpty
+
+ def atThreshold(num: Double) = {
+ require(num >= 0 && num <= 1.0)
+ ((processed - 1).toDouble / numberOfTests <= num) && (processed.toDouble / numberOfTests >= num)
+ }
+
+ def extraMessage = {
+ // for now quiet for normal people
+ if (isVerbose || isTrace || isDebug) {
+ if (isComplete) "(#%d 100%%)" format workerNum
+ else if (isVerbose) "(#%d %d/%d)".format(workerNum, processed, numberOfTests)
+ else if (isTrace && atThreshold(0.5)) "(#%d 50%%)" format workerNum
+ else ""
+ }
+ else ""
+ }
+
+ def countAndReport(result: TestResult) {
+ val TestResult(test, state) = result
+ // refuse to count an entity twice
+ if (results contains test)
+ return warning("Received duplicate result for %s: was %s, now %s".format(test, results(test), state))
+
+ // increment the counter for this result state
+ results += (test -> state)
+
+ // show on screen
+ if (isDryRun) normal("\n") // blank line between dry run traces
+ else result show extraMessage
+
+ // remove log if successful
+ if (result.passed)
+ test.deleteLog()
+
+ // Respond to master if this Worker is complete
+ if (isComplete)
+ onCompletion(results)
+ }
+
+ Actor.loopWhile(testIterator.hasNext) {
+ val parent = self
+ // pick a test and set some alarms
+ val test = testIterator.next
+ val alarmer = test startAlarms (parent ! new Timeout(test))
+
+ actor {
+ ultimately(alarmer.cancelAll()) {
+ // Calling isSuccess forces the lazy val "process" inside the test, running it.
+ val res = test.isSuccess
+ // Cancel the alarms and alert the media.
+ parent ! TestResult(test, res)
+ }
+ }
+
+ react {
+ case x: TestResult => countAndReport(x)
+ }
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/src/partest/scala/tools/partest/Entities.scala b/src/partest/scala/tools/partest/Entities.scala
new file mode 100644
index 0000000000..658cfdee12
--- /dev/null
+++ b/src/partest/scala/tools/partest/Entities.scala
@@ -0,0 +1,77 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Philipp Haller
+ */
+
+package scala.tools
+package partest
+
+import nsc.io._
+
+trait Entities {
+ self: Universe =>
+
+ abstract class TestEntity extends AbsTestEntity
+ with TestContribution
+ with TestHousekeeping
+ with TestAlarms
+ with EntityLogging
+ with CompilableTest
+ with ScriptableTest
+ with DiffableTest {
+ def location: Path
+ def category: TestCategory
+
+ lazy val label = location.stripExtension
+ lazy val testClasspath = returning(createClasspathString())(vtrace)
+
+ /** Was this test successful? Calling this for the first time forces
+ * lazy val "process" which actually runs the test.
+ */
+ def isSuccess = process
+
+ /** Some standard files, which may or may not be present.
+ */
+ def scalaOptsFile = withExtension("flags").toFile // opts to scalac
+ def javaOptsFile = withExtension("javaopts").toFile // opts to java (but not javac)
+ def commandFile = withExtension("cmds").toFile // sequence of commands to execute
+ def logFile = withExtension("log").toFile // collected output
+
+ /** Some standard directories.
+ */
+ def outDir = withExtension("obj").toDirectory // output dir, e.g. files/pos/t14.obj
+ def categoryDir = location.parent.normalize // category dir, e.g. files/pos/
+ def sourcesDir = location ifDirectory (_.normalize) getOrElse categoryDir
+
+ /** Standard arguments for run, exec, diff.
+ */
+ def argumentsToRun = List("Test", "jvm")
+ def argumentsToExec = List(location.path)
+ def argumentsToDiff = ((checkFile, logFile))
+
+ /** Using a .cmds file for a custom test sequence.
+ */
+ def commandList = safeLines(commandFile)
+ def testSequence =
+ if (commandFile.isFile && commandList.nonEmpty) commandList map customTestStep
+ else category.testSequence
+
+ def run() = runScala(argumentsToRun)
+ def exec() = runExec(argumentsToExec)
+ def diff() = runDiff(argumentsToDiff._1, argumentsToDiff._2)
+
+ /** The memoized result of the test run.
+ */
+ private lazy val process = {
+ def preCheck = precondition || returning(false)(_ => trace("precondition failed"))
+ def allSteps = testSequence.actions forall (f => f(this))
+ val outcome = runWrappers(preCheck && allSteps)
+
+ // an empty outcome means we've been interrupted and are shutting down.
+ outcome getOrElse false
+ }
+ }
+
+ case class TestDirectory(category: TestCategory, location: Directory) extends TestEntity { }
+ case class TestFile(category: TestCategory, location: File) extends TestEntity { }
+}
diff --git a/src/partest/scala/tools/partest/Housekeeping.scala b/src/partest/scala/tools/partest/Housekeeping.scala
new file mode 100644
index 0000000000..a624ca8adb
--- /dev/null
+++ b/src/partest/scala/tools/partest/Housekeeping.scala
@@ -0,0 +1,187 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+
+import scala.util.control.Exception.catching
+import util._
+import nsc.io._
+import Process.runtime
+import Properties._
+
+/** An agglomeration of code which is low on thrills. Hopefully
+ * it operates so quietly in the background that you never have to
+ * look at this file.
+ */
+trait Housekeeping {
+ self: Universe =>
+
+ /** Orderly shutdown on ctrl-C. */
+ @volatile private var _shuttingDown = false
+ protected def setShuttingDown() = {
+ /** Whatever we want to do as shutdown begins goes here. */
+ if (!_shuttingDown) {
+ warning("Received shutdown signal, partest is cleaning up...\n")
+ _shuttingDown = true
+ }
+ }
+ def isShuttingDown = _shuttingDown
+
+ /** Execute some code with a shutdown hook in place. This is
+ * motivated by the desire not to leave the filesystem full of
+ * junk when someone ctrl-Cs a test run.
+ */
+ def withShutdownHook[T](hook: => Unit)(body: => T): Option[T] =
+ /** Java doesn't like it if you keep adding and removing shutdown
+ * hooks after shutdown has begun, so we trap the failure.
+ */
+ catching(classOf[IllegalStateException]) opt {
+ val t = new Thread() {
+ override def run() = {
+ setShuttingDown()
+ hook
+ }
+ }
+ runtime addShutdownHook t
+
+ try body
+ finally runtime removeShutdownHook t
+ }
+
+ /** Search for a directory, possibly given only a name, by starting
+ * at the current dir and walking upward looking for it at each level.
+ */
+ protected def searchForDir(name: String): Directory = {
+ val result = Path(name) ifDirectory (x => x.normalize) orElse {
+ val cwd = Directory.Current getOrElse error("user.dir property not set")
+ val dirs = cwd :: cwd.parents map (_ / name)
+
+ Path onlyDirs dirs map (_.normalize) headOption
+ }
+
+ result getOrElse error("Fatal: could not find directory '%s'" format name)
+ }
+
+ /** Paths we ignore for most purposes.
+ */
+ def ignorePath(x: Path) = {
+ (x.name startsWith ".") ||
+ (x.isDirectory && ((x.name == "lib") || x.hasExtension("obj", "svn")))
+ }
+ /** Make a possibly relative path absolute using partestDir as the base.
+ */
+ def absolutize(path: String) = Path(path) toAbsoluteWithRoot partestDir
+
+ /** Go on a deleting binge.
+ */
+ def cleanupAll() {
+ if (isNoCleanup)
+ return
+
+ val (dirCount, fileCount) = (cleanupObjDirs(), cleanupLogs() + cleanupJunk())
+ if (dirCount + fileCount > 0)
+ normal("Cleaned up %d directories and %d files.\n".format(dirCount, fileCount))
+ }
+
+ def cleanupObjDirs() = countTrue(allObjDirs collect { case x if x.exists => x.deleteRecursively() })
+ def cleanupJunk() = countTrue(allClassFiles collect { case x if x.exists => x.delete() })
+ def cleanupLogs() = countTrue(allLogFiles collect { case x if x.exists => x.delete() })
+
+ /** Look through every file in the partest directory and ask around
+ * to make sure someone knows him. Complain about strangers.
+ */
+ def validateAll() {
+ def denotesTest(p: Path) = allCategories exists (_ denotesTest p)
+ def isMSILcheck(p: Path) = p.name endsWith "-msil.check"
+
+ def analyzeCategory(cat: DirBasedCategory) = {
+ val allTests = cat.enumerate
+ val otherPaths = cat.root walkFilter (x => !ignorePath(x)) filterNot (cat denotesTest _) filterNot isMSILcheck toList
+ val count = otherPaths.size
+
+ println("Validating %d non-test paths in %s.".format(count, cat.kind))
+
+ for (path <- otherPaths) {
+ (allTests find (_ acknowledges path)) match {
+ case Some(test) => if (isVerbose) println(" OK: '%s' is claimed by '%s'".format(path, test.label))
+ case _ => println(">> Unknown path '%s'" format path)
+ }
+ }
+ }
+
+ allCategories collect { case x: DirBasedCategory => analyzeCategory(x) }
+ }
+
+ trait TestHousekeeping {
+ self: TestEntity =>
+
+ /** Calculating derived files. Given a test like
+ * files/run/foo.scala or files/run/foo/
+ * This creates paths like foo.check, foo.flags, etc.
+ */
+ def withExtension(extension: String) = categoryDir / "%s.%s".format(label, extension)
+
+    /** True if this test acknowledges that the given path belongs to it.
+ * Overridden by some categories.
+ */
+ def acknowledges(path: Path): Boolean = {
+ val loc = location.normalize
+ val knownPaths = List(scalaOptsFile, javaOptsFile, commandFile, logFile, checkFile) ++ jarsInTestDir
+ def isContainedSource = location.isDirectory && isJavaOrScala(path) && (path.normalize startsWith loc)
+
+ (knownPaths exists (_ isSame path)) || isContainedSource
+ }
+
+ /** This test "responds to" this String. This could mean anything -- it's a
+ * way of specifying ad-hoc collections of tests to exercise only a subset of tests.
+ * At present it looks for the given String in all the test sources.
+ */
+ def respondsToString(str: String) = containsString(str)
+ def containsString(str: String) = {
+ debug("Checking %s for \"%s\"".format(sourceFiles mkString ", ", str))
+ sourceFiles map safeSlurp exists (_ contains str)
+ }
+
+ def possiblyTimed[T](body: => T): T = {
+ if (isStats) timed(recordTestTiming(label, _))(body)
+ else body
+ }
+
+ private def prepareForTestRun() = {
+ // make sure we have a clean slate
+ deleteLog(force = true)
+ if (outDir.exists)
+ outDir.deleteRecursively()
+
+ // recreate object dir
+ outDir createDirectory true
+ }
+ def deleteOutDir() = outDir.deleteRecursively()
+ def deleteShutdownHook() = { debug("Shutdown hook deleting " + outDir) ; deleteOutDir() }
+
+ protected def runWrappers[T](body: => T): Option[T] = {
+ prepareForTestRun()
+
+ withShutdownHook(deleteShutdownHook()) {
+ loggingOutAndErr {
+ val result = possiblyTimed { body }
+ if (!isNoCleanup)
+ deleteOutDir()
+
+ result
+ }
+ }
+ }
+
+ override def toString = location.path
+ override def equals(other: Any) = other match {
+ case x: TestEntity => location.normalize == x.location.normalize
+ case _ => false
+ }
+ override def hashCode = location.normalize.hashCode
+ }
+
+ private def countTrue(f: => Iterator[Boolean]) = f filter (_ == true) length
+} \ No newline at end of file
diff --git a/src/partest/scala/tools/partest/Partest.scala b/src/partest/scala/tools/partest/Partest.scala
new file mode 100644
index 0000000000..019ed270e5
--- /dev/null
+++ b/src/partest/scala/tools/partest/Partest.scala
@@ -0,0 +1,72 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+
+import nsc.io._
+import nsc.util.CommandLine
+import category.AllCategories
+
+/** Global object for a Partest run. It is completely configured by the list
+ * of arguments passed to the constructor (although there are a few properties
+ *  and environment variables which can influence matters). See PartestSpec.scala
+ * for the complete list.
+ */
+class Partest(args: List[String]) extends {
+ val parsed = PartestSpecReference(args: _*)
+} with Universe with PartestSpec with AllCategories {
+
+ debug("Partest object created with args: " + (args mkString " "))
+
+ // The abstract values from Universe.
+ lazy val testBuildDir = searchForDir(buildDir)
+ lazy val partestDir = searchForDir(rootDir)
+ lazy val allCategories = List(Pos, Neg, Run, Jvm, Res, Shootout, Scalap, Scalacheck, BuildManager, Script)
+
+ lazy val selectedCategories = if (isAllImplied) allCategories else specifiedCats
+
+ // Coarse validation of partest directory: holds a file called partest.
+ (partestDir / "partest").isFile || error("'%s' is not a valid partest directory." format partestDir)
+
+ def runSets = toArgs(parsed.getOrElse("--runsets", ""))
+ def specifiedTests = parsed.residualArgs map (x => Path(x).normalize)
+ def specifiedKinds = testKinds filter (x => isSet(x) || (runSets contains x))
+ def specifiedCats = specifiedKinds flatMap (x => allCategories find (_.kind == x))
+ def isAllImplied = isAll || (specifiedTests.isEmpty && specifiedKinds.isEmpty)
+
+ /** Assembles a filter based on command line options which restrict the test set
+ * --grep limits to only matching tests
+ * --failed limits to only recently failed tests (log file is present)
+ * --<category> limits to only the given tests and categories (but --all overrides)
+ * path/to/Test limits to only the given tests and categories
+ */
+ lazy val filter = {
+ def indivFilter(test: TestEntity) = specifiedTests contains test.location.normalize
+ def categoryFilter(test: TestEntity) = specifiedCats contains test.category
+ def indivOrCat(test: TestEntity) = isAllImplied || indivFilter(test) || categoryFilter(test) // combines previous two
+
+ def failedFilter(test: TestEntity) = !isFailed || (test.logFile exists)
+ def grepFilter(test: TestEntity) = grepExpr.isEmpty || (test containsString grepExpr.get)
+ def combinedFilter(x: TestEntity) = indivOrCat(x) && failedFilter(x) && grepFilter(x) // combines previous three
+
+ combinedFilter _
+ }
+
+ def launchTestSuite() = runSelection(selectedCategories, filter)
+}
+
+object Partest {
+ def fromBuild(dir: String, args: String*): Partest = apply("--builddir" +: dir +: args: _*)
+ def apply(args: String*): Partest = new Partest(args.toList)
+
+ // builds without partest jars won't actually work
+ def starr() = fromBuild("")
+ def locker() = fromBuild("build/locker")
+ def quick() = fromBuild("build/quick")
+ def pack() = fromBuild("build/pack")
+ def strap() = fromBuild("build/strap")
+ def dist() = fromBuild("dists/latest")
+}
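A minimal sketch of driving this entry point from code rather than the shell, assuming the partest classes introduced in this patch are on the classpath; the object name and the option strings are illustrative only.

import scala.tools.partest.Partest

// Hypothetical driver: test build/pack, running only the pos and run
// categories and showing diffs on failure. Roughly equivalent to the
// command line: partest --builddir build/pack --pos --run --show-diff
object PartestDriverSketch {
  def main(args: Array[String]): Unit = {
    val runner = Partest.fromBuild("build/pack", "--pos", "--run", "--show-diff")
    val result = runner.launchTestSuite()
    println(result)
  }
}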
+
diff --git a/src/partest/scala/tools/partest/PartestSpec.scala b/src/partest/scala/tools/partest/PartestSpec.scala
new file mode 100644
index 0000000000..a8a1d9b0cb
--- /dev/null
+++ b/src/partest/scala/tools/partest/PartestSpec.scala
@@ -0,0 +1,108 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools
+package partest
+
+import Properties._
+import nsc.io._
+import nsc.util.{ CommandLine, CommandLineSpec, CommandLineReferenceSpec }
+
+/** This takes advantage of bits of scala goodness to fully define a command
+ * line program with a minimum of duplicated code. When the specification object
+ * is created, the vals are evaluated in order and each of them side effects
+ * a private accumulator. What emerges is a full list of the valid unary
+ * and binary arguments, as well as autogenerated help.
+ */
+trait PartestSpec extends CommandLineSpec {
+ override def isPassthroughProperty(key: String) = key == "partest.options"
+ override def isSysPropOption(key: String) = {
+ val segments = (key split '.').toList
+ if (segments.size == 2 && segments.head == "partest") Some(segments.last)
+ else None
+ }
+
+ private var _testKinds: List[String] = Nil
+ private def kind(s: String) = returning(s)(_testKinds +:= _)
+
+ def testKinds = _testKinds
+ def versionMsg = Properties.versionMsg
+
+ help("""
+ |Usage: partest [<options>] [<test> <test> ...]
+ | <test>: a path to a test designator, typically a .scala file or a directory.
+ | Examples: files/pos/test1.scala, files/res/bug785""")
+
+ heading ("Test categories:")
+ val isAll = ("all" / "run all tests (default, unless no options given)" ?)
+ (kind("pos") / "Compile files that are expected to build" ?)
+ (kind("neg") / "Compile files that are expected to fail" ?)
+ (kind("run") / "Test JVM backend" ?)
+ (kind("jvm") / "Test JVM backend" ?)
+ (kind("res") / "Run resident compiler scenarii" ?)
+ (kind("buildmanager") / "Run Build Manager scenarii" ?)
+ (kind("scalacheck") / "Run Scalacheck tests" ?)
+ (kind("script") / "Run script files" ?)
+ (kind("shootout") / "Run shootout tests" ?)
+ (kind("scalap") / "Run scalap tests" ?)
+
+ heading ("""Test "smart" categories:""")
+ val grepExpr = "grep" / "run all tests with a source file containing <expr>" >>
+ val isFailed = "failed" / "run all tests which failed on the last run" ?
+
+ heading ("Specifying paths and additional flags, ~ means repository root:")
+ val rootDir = "rootdir" / "path from ~ to partest (default: test)" |> "test"
+ val buildDir = "builddir" / "path from ~ to test build (default: build/pack)" |> "build/pack"
+ val srcDir = "srcdir" / "path from --rootdir to sources (default: files)" |> "files"
+ val javaOpts = "javaopts" / "flags to java on all runs (overrides JAVA_OPTS)" |> envOrElse("JAVA_OPTS", "")
+ val scalacOpts = "scalacopts" / "flags to scalac on all tests (overrides SCALAC_OPTS)" |> envOrElse("SCALAC_OPTS", "")
+
+ ("pack" / "alias for --builddir build/pack") ?+> List("--builddir", "build/pack")
+ ("quick" / "alias for --builddir build/quick") ?+> List("--builddir", "build/quick")
+
+ heading ("Options influencing output:")
+ val isTrace = "trace" / "show the individual steps taken by each test" ?
+ val isShowDiff = "show-diff" / "show diff between log and check file" ?
+ val isShowLog = "show-log" / "show log on failures" ?
+ val isDryRun = "dry-run" / "do not run tests, only show their traces." ?
+ val isTerse = "terse" / "be less verbose (almost silent except for failures)" ?
+ val isVerbose = "verbose" / "be more verbose (additive with --trace)" ?
+ val isDebug = "debug" / "maximum debugging output" ?
+ val isAnsi = "ansi" / "print output in color" ?
+
+ heading ("Other options:")
+ val timeout_ = "timeout" / "Overall timeout in seconds" |> "14400"
+ val testWarning_ = "test-warning" / "Test warning in seconds" >> ; // defaults to testTimeout / 10
+ val testTimeout_ = "test-timeout" / "Test timeout in seconds" >> ; // defaults to 900
+ val isCleanup = "cleanup" / "delete all stale files and dirs before run" ?
+ val isNoCleanup = "nocleanup" / "do not delete any logfiles or object dirs" ?
+ val isStats = "stats" / "collect and print statistics about the tests" ?
+ val isValidate = "validate" / "examine test filesystem for inconsistencies" ?
+ val isVersion = "version" / "print version" ?
+
+ // no help for anything below this line - secret options
+ // mostly intended for property configuration.
+ val runsets = "runsets" |> ""
+ val isNoAlarms = ("noalarms" ?)
+ val isInsideAnt = ("is-in-ant" ?)
+}
+
+object PartestSpecReference extends PartestSpec with CommandLineReferenceSpec {
+ import CommandLineSpec._
+
+ def parsed: CommandLine = null
+ override def creator(args: List[String]) =
+ new ThisCommandLine(args) {
+ override def onlyKnownOptions = true
+ override def errorFn(msg: String) = printAndExit("Error: " + msg)
+ }
+
+ def main(args: Array[String]): Unit = println(bashCompletion("partest"))
+
+ /** Append bash completion for partest to the given file.
+ */
+ def appendCompletionTo(f: File) = f appendAll bashCompletion("partest")
+}
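The header comment of PartestSpec.scala above describes vals whose initialisation side-effects a private accumulator; below is a standalone sketch of that pattern under hypothetical names, without the real CommandLineSpec operators.

// Each val registers itself in a private buffer as it is initialised, so the
// finished object knows every option it declared and can print help for them.
object SpecSketch {
  private var registered: List[(String, String)] = Nil

  private def flag(name: String, help: String): String = {
    registered ::= (name -> help)
    name
  }

  val isAll     = flag("all", "run all tests")
  val isVerbose = flag("verbose", "be more verbose")

  def helpMsg: String =
    registered.reverse.map { case (n, h) => "  --%-10s %s".format(n, h) }.mkString("\n")

  def main(args: Array[String]): Unit = println(helpMsg)
}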
+
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
deleted file mode 100644
index 59781b0aa2..0000000000
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ /dev/null
@@ -1,250 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.tools.partest
-
-import scala.actors.Actor._
-
-import java.io.File
-import java.net.URLClassLoader
-
-import org.apache.tools.ant.Task
-import org.apache.tools.ant.types.{Path, Reference, FileSet}
-
-class PartestTask extends Task {
-
- def addConfiguredPosTests(input: FileSet) {
- posFiles = Some(input)
- }
-
- def addConfiguredNegTests(input: FileSet) {
- negFiles = Some(input)
- }
-
- def addConfiguredRunTests(input: FileSet) {
- runFiles = Some(input)
- }
-
- def addConfiguredJvmTests(input: FileSet) {
- jvmFiles = Some(input)
- }
-
- def addConfiguredResidentTests(input: FileSet) {
- residentFiles = Some(input)
- }
-
- def addConfiguredScriptTests(input: FileSet) {
- scriptFiles = Some(input)
- }
-
- def addConfiguredShootoutTests(input: FileSet) {
- shootoutFiles = Some(input)
- }
-
- def addConfiguredScalapTests(input: FileSet) {
- scalapFiles = Some(input)
- }
-
- def setClasspath(input: Path) {
- if (classpath.isEmpty)
- classpath = Some(input)
- else
- classpath.get.append(input)
- }
-
- def createClasspath(): Path = {
- if (classpath.isEmpty) classpath = Some(new Path(getProject()))
- classpath.get.createPath()
- }
-
- def setClasspathref(input: Reference) {
- createClasspath().setRefid(input)
- }
-
- def setShowLog(input: Boolean) {
- showLog = input
- }
-
- def setShowDiff(input: Boolean) {
- showDiff = input
- }
-
- def setErrorOnFailed(input: Boolean) {
- errorOnFailed = input
- }
-
- def setJavaCmd(input: File) {
- javacmd = Some(input)
- }
-
- def setJavacCmd(input: File) {
- javaccmd = Some(input)
- }
-
- def setScalacOpts(opts: String) {
- scalacOpts = Some(opts)
- }
-
- def setTimeout(delay: String) {
- timeout = Some(delay)
- }
-
- def setDebug(input: Boolean) {
- debug = input
- }
-
- private var classpath: Option[Path] = None
- private var javacmd: Option[File] = None
- private var javaccmd: Option[File] = None
- private var showDiff: Boolean = false
- private var showLog: Boolean = false
- private var runFailed: Boolean = false
- private var posFiles: Option[FileSet] = None
- private var negFiles: Option[FileSet] = None
- private var runFiles: Option[FileSet] = None
- private var jvmFiles: Option[FileSet] = None
- private var residentFiles: Option[FileSet] = None
- private var scriptFiles: Option[FileSet] = None
- private var shootoutFiles: Option[FileSet] = None
- private var scalapFiles: Option[FileSet] = None
- private var errorOnFailed: Boolean = false
- private var scalacOpts: Option[String] = None
- private var timeout: Option[String] = None
- private var debug = false
-
- private def getFiles(fileSet: Option[FileSet]): Array[File] =
- if (fileSet.isEmpty) Array()
- else {
- val files = fileSet.get
- files.getDirectoryScanner(getProject).getIncludedFiles map {
- fs => new File(files.getDir(getProject), fs)
- }
- }
-
- private def getFilesAndDirs(fileSet: Option[FileSet]): Array[File] =
- if (!fileSet.isEmpty) {
- val files = fileSet.get
- val fileTests = getFiles(fileSet)
- val dir = files.getDir(getProject)
- val dirTests = dir.listFiles(new java.io.FileFilter {
- def accept(file: File) =
- file.isDirectory &&
- (!file.getName().equals(".svn")) &&
- (!file.getName().endsWith(".obj"))
- })
- (dirTests ++ fileTests).toArray
- }
- else
- Array()
-
- private def getPosFiles = getFilesAndDirs(posFiles)
- private def getNegFiles = getFilesAndDirs(negFiles)
- private def getRunFiles = getFiles(runFiles)
- private def getJvmFiles = getFilesAndDirs(jvmFiles)
- private def getResidentFiles = getFiles(residentFiles)
- private def getScriptFiles = getFiles(scriptFiles)
- private def getShootoutFiles = getFiles(shootoutFiles)
- private def getScalapFiles = getFiles(scalapFiles)
-
- override def execute() {
- if (debug)
- System.setProperty("partest.debug", "true")
-
- if (classpath.isEmpty)
- error("Mandatory attribute 'classpath' is not set.")
-
- val scalaLibrary =
- (classpath.get.list map { fs => new File(fs) }) find { f =>
- f.getName match {
- case "scala-library.jar" => true
- case "library" if (f.getParentFile.getName == "classes") => true
- case _ => false
- }
- }
-
- if (scalaLibrary.isEmpty)
- error("Provided classpath does not contain a Scala library.")
-
- val classloader = this.getClass.getClassLoader
-
- val antRunner: AnyRef =
- classloader.loadClass("scala.tools.partest.nest.AntRunner").newInstance().asInstanceOf[AnyRef]
- val antFileManager: AnyRef =
- antRunner.getClass.getMethod("fileManager", Array[Class[_]](): _*).invoke(antRunner, Array[Object](): _*)
-
- val runMethod =
- antRunner.getClass.getMethod("reflectiveRunTestsForFiles", Array(classOf[Array[File]], classOf[String]): _*)
-
- def runTestsForFiles(kindFiles: Array[File], kind: String): (Int, Int) = {
- val result = runMethod.invoke(antRunner, Array(kindFiles, kind): _*).asInstanceOf[Int]
- (result >> 16, result & 0x00FF)
- }
-
- def setFileManagerBooleanProperty(name: String, value: Boolean) {
- val setMethod =
- antFileManager.getClass.getMethod(name+"_$eq", Array(classOf[Boolean]): _*)
- setMethod.invoke(antFileManager, Array(java.lang.Boolean.valueOf(value)).asInstanceOf[Array[Object]]: _*)
- }
-
- def setFileManagerStringProperty(name: String, value: String) {
- val setMethod =
- antFileManager.getClass.getMethod(name+"_$eq", Array(classOf[String]): _*)
- setMethod.invoke(antFileManager, Array(value).asInstanceOf[Array[Object]]: _*)
- }
-
- setFileManagerBooleanProperty("showDiff", showDiff)
- setFileManagerBooleanProperty("showLog", showLog)
- setFileManagerBooleanProperty("failed", runFailed)
- if (!javacmd.isEmpty)
- setFileManagerStringProperty("JAVACMD", javacmd.get.getAbsolutePath)
- if (!javaccmd.isEmpty)
- setFileManagerStringProperty("JAVAC_CMD", javaccmd.get.getAbsolutePath)
- setFileManagerStringProperty("CLASSPATH", classpath.get.list.mkString(File.pathSeparator))
- setFileManagerStringProperty("LATEST_LIB", scalaLibrary.get.getAbsolutePath)
- if (!scalacOpts.isEmpty)
- setFileManagerStringProperty("SCALAC_OPTS", scalacOpts.get)
- if (!timeout.isEmpty)
- setFileManagerStringProperty("timeout", timeout.get)
-
- type TFSet = (Array[File], String, String)
- val testFileSets = List(
- (getPosFiles, "pos", "Compiling files that are expected to build"),
- (getNegFiles, "neg", "Compiling files that are expected to fail"),
- (getRunFiles, "run", "Compiling and running files"),
- (getJvmFiles, "jvm", "Compiling and running files"),
- (getResidentFiles, "res", "Running resident compiler scenarii"),
- (getScriptFiles, "script", "Running script files"),
- (getShootoutFiles, "shootout", "Running shootout tests"),
- (getScalapFiles, "scalap", "Running scalap tests")
- )
-
- def runSet(set: TFSet): (Int, Int) = {
- val (files, name, msg) = set
- if (files.isEmpty) (0, 0)
- else {
- log(msg)
- runTestsForFiles(files, name)
- }
- }
-
- val _results = testFileSets map runSet
- val allSuccesses = _results map (_._1) sum
- val allFailures = _results map (_._2) sum
-
- def f = if (errorOnFailed && allFailures > 0) error(_) else log(_: String)
- def s = if (allFailures > 1) "s" else ""
- val msg =
- if (allFailures > 0) "Test suite finished with %d case%s failing.".format(allFailures, s)
- else if (allSuccesses == 0) "There were no tests to run."
- else "Test suite finished with no failures."
-
- f(msg)
- }
-}
diff --git a/src/partest/scala/tools/partest/utils/Properties.scala b/src/partest/scala/tools/partest/Properties.scala
index 237ddea14e..4eeb0359ec 100644
--- a/src/partest/scala/tools/partest/utils/Properties.scala
+++ b/src/partest/scala/tools/partest/Properties.scala
@@ -8,8 +8,8 @@
// $Id$
-package scala.tools.partest
-package utils
+package scala.tools
+package partest
/** Loads partest.properties from the jar. */
object Properties extends scala.util.PropertiesTrait {
diff --git a/src/partest/scala/tools/partest/Results.scala b/src/partest/scala/tools/partest/Results.scala
new file mode 100644
index 0000000000..4e0c446788
--- /dev/null
+++ b/src/partest/scala/tools/partest/Results.scala
@@ -0,0 +1,113 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+
+import scala.collection.immutable
+
+trait Results {
+ self: Universe =>
+
+ /** A collection of tests for a Worker.
+ */
+ case class TestsToRun(entities: List[TestEntity])
+
+ /** The response from a Worker who has been given TestsToRun.
+ */
+ case class ResultsOfRun(results: immutable.Map[TestEntity, Int])
+
+ /** The result of a single test. (0: OK, 1: FAILED, 2: TIMEOUT)
+ */
+ sealed abstract class TestResult(val state: Int, val description: String) {
+ def entity: TestEntity
+
+ def passed = state == 0
+ def colorize(s: String): String
+ def show(msg: String) =
+ if (!isShuttingDown)
+ showResult(colorize(description), msg)
+
+ private def outputPrefix = if (isInsideAnt) "" else markNormal("partest: ")
+ private def name = src relativize entity.location // e.g. "neg/test.scala"
+ private def showResult(status: String, extraMsg: String) =
+ normal(outputPrefix + "[...]/%-40s [%s] %s\n".format(name, status, extraMsg))
+
+ override def equals(other: Any) = other match {
+ case x: TestResult => entity == x.entity
+ case _ => false
+ }
+ override def hashCode = entity.hashCode
+ override def toString = "%s (%s)".format(entity, if (passed) "passed" else "failed")
+ }
+
+ class Success(val entity: TestEntity) extends TestResult(0, " OK ") {
+ def colorize(s: String) = markSuccess(s)
+ override def show(msg: String) = if (!isTerse) super.show(msg)
+ }
+ class Failure(val entity: TestEntity) extends TestResult(1, " FAILED ") {
+ def colorize(s: String) = markFailure(s)
+
+ override def show(msg: String) = {
+ super.show(msg)
+
+ if (isShowDiff || isTrace)
+ normal(entity.diffOutput())
+
+ if (isShowLog || isTrace)
+ normal(toStringTrunc(entity.failureMessage(), 1600))
+ }
+ }
+ class Timeout(val entity: TestEntity) extends TestResult(2, "TIME OUT") {
+ def colorize(s: String) = markFailure(s)
+ }
+
+ object TestResult {
+ def apply(entity: TestEntity, success: Boolean) =
+ if (success) new Success(entity)
+ else new Failure(entity)
+
+ def apply(entity: TestEntity, state: Int) = state match {
+ case 0 => new Success(entity)
+ case 1 => new Failure(entity)
+ case 2 => new Timeout(entity)
+ }
+ def unapply(x: Any) = x match {
+ case x: TestResult => Some((x.entity, x.state))
+ case _ => None
+ }
+ }
+
+ /** The combined results of any number of tests.
+ */
+ case class CombinedTestResults(
+ passed: Int,
+ failed: Int,
+ elapsedMilliseconds: Long
+ ) {
+ // housekeeping
+ val elapsedSecs = elapsedMilliseconds / 1000
+ val elapsedMins = elapsedSecs / 60
+ val elapsedHrs = elapsedMins / 60
+ val dispMins = elapsedMins - elapsedHrs * 60
+ val dispSecs = elapsedSecs - elapsedMins * 60
+
+ def total = passed + failed
+ def hasFailures = failed > 0
+ def exitCode = if (expectedErrors == failed) 0 else 1
+
+ def ++(x: CombinedTestResults) = CombinedTestResults(
+ passed + x.passed,
+ failed + x.failed,
+ elapsedMilliseconds + x.elapsedMilliseconds
+ )
+
+ def elapsedString = "%02d:%02d:%02d".format(elapsedHrs, dispMins, dispSecs)
+ override def toString =
+ if (total == 0) "There were no tests to run."
+ else if (isDryRun) "%d tests would be run." format total
+ else if (hasFailures) "%d of %d tests failed (elapsed time: %s)".format(failed, total, elapsedString)
+ else "All %d tests were successful (elapsed time: %s)".format(total, elapsedString)
+ }
+} \ No newline at end of file
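A standalone sketch of the elapsed-time arithmetic CombinedTestResults performs above, under a hypothetical object name.

// Milliseconds are broken down into hours, minutes and seconds for display,
// exactly as in CombinedTestResults.elapsedString.
object ElapsedSketch {
  def elapsedString(elapsedMilliseconds: Long): String = {
    val elapsedSecs = elapsedMilliseconds / 1000
    val elapsedMins = elapsedSecs / 60
    val elapsedHrs  = elapsedMins / 60
    val dispMins    = elapsedMins - elapsedHrs * 60
    val dispSecs    = elapsedSecs - elapsedMins * 60
    "%02d:%02d:%02d".format(elapsedHrs, dispMins, dispSecs)
  }

  def main(args: Array[String]): Unit =
    println(elapsedString(3723000))   // 1h 2m 3s -> 01:02:03
}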
diff --git a/src/partest/scala/tools/partest/Runner.scala b/src/partest/scala/tools/partest/Runner.scala
new file mode 100644
index 0000000000..7f67c93478
--- /dev/null
+++ b/src/partest/scala/tools/partest/Runner.scala
@@ -0,0 +1,39 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Philipp Haller
+ */
+
+package scala.tools
+package partest
+
+import nsc.io._
+
+object Runner {
+ def main(mainArgs: Array[String]) {
+ val propArgs = PartestSpecReference.sysPropsAsOptions()
+ val args = (propArgs ++ mainArgs).toList
+ val runner = Partest(args: _*)
+ import runner._
+
+ if (isVersion) return println(versionMsg)
+ if (args.isEmpty) return println(helpMsg)
+ if (isValidate) return validateAll()
+
+ printConfigBanner()
+
+ if (isCleanup)
+ cleanupAll()
+
+ val result = launchTestSuite()
+ val exitCode = result.exitCode
+ val message = "\n" + result + "\n"
+
+ if (exitCode == 0) success(message)
+ else failure(message)
+
+ if (isStats)
+ showTestStatistics()
+
+ System exit exitCode
+ }
+}
diff --git a/src/partest/scala/tools/partest/Statistics.scala b/src/partest/scala/tools/partest/Statistics.scala
new file mode 100644
index 0000000000..2ea3c6e8f0
--- /dev/null
+++ b/src/partest/scala/tools/partest/Statistics.scala
@@ -0,0 +1,46 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Philipp Haller
+ */
+
+package scala.tools
+package partest
+
+import scala.collection.mutable.HashMap
+
+trait Statistics {
+ /** Only collected when --stats is given. */
+ lazy val testStatistics = new HashMap[String, Long]
+
+ /** Given function and block of code, evaluates code block,
+ * calls function with milliseconds elapsed, and returns block result.
+ */
+ def timed[T](f: Long => Unit)(body: => T): T = {
+ val start = System.currentTimeMillis
+ val result = body
+ val end = System.currentTimeMillis
+
+ f(end - start)
+ result
+ }
+ /** Times body and returns both values.
+ */
+ def timed2[T](body: => T): (Long, T) = {
+ var milliSeconds = 0L
+ val result = timed(x => milliSeconds = x)(body)
+
+ (milliSeconds, result)
+ }
+
+ def resultsToStatistics(results: Iterable[(_, Int)]): (Int, Int) =
+ (results partition (_._2 == 0)) match {
+ case (winners, losers) => (winners.size, losers.size)
+ }
+
+ def recordTestTiming(name: String, milliseconds: Long) =
+ synchronized { testStatistics(name) = milliseconds }
+
+ def showTestStatistics() {
+ testStatistics.toList sortBy (-_._2) foreach { case (k, v) => println("%s: %.2f seconds".format(k, (v.toDouble / 1000))) }
+ }
+}
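A standalone sketch showing how the timed/timed2 combinators above can be used; the demo object and the summed range are hypothetical.

// Times a block of code and reports both the elapsed milliseconds and the
// computed result, mirroring Statistics.timed and Statistics.timed2.
object TimingSketch {
  def timed[T](f: Long => Unit)(body: => T): T = {
    val start = System.currentTimeMillis
    val result = body
    f(System.currentTimeMillis - start)
    result
  }

  def timed2[T](body: => T): (Long, T) = {
    var millis = 0L
    val result = timed(x => millis = x)(body)
    (millis, result)
  }

  def main(args: Array[String]): Unit = {
    val (elapsed, sum) = timed2((1 to 10000).sum)
    println("summed to %d in %d ms".format(sum, elapsed))
  }
}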
diff --git a/src/partest/scala/tools/partest/Universe.scala b/src/partest/scala/tools/partest/Universe.scala
new file mode 100644
index 0000000000..557d48fe54
--- /dev/null
+++ b/src/partest/scala/tools/partest/Universe.scala
@@ -0,0 +1,101 @@
+/* __ *\
+** ________ ___ / / ___ Scala Parallel Testing **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.tools
+package partest
+
+import nsc.io._
+import category.AllCategories
+import io.Logging
+
+/** The high level view of the partest infrastructure.
+ */
+abstract class Universe
+ extends Entities
+ with BuildContributors
+ with Logging
+ with Dispatcher
+ with Statistics
+ with Housekeeping
+ with Results
+ with PartestCompilation
+ with PartestSpec
+ with Config
+ with Alarms
+ with Actions
+ with Categories {
+
+ /** The abstract values from which all else is derived. */
+ def partestDir: Directory
+ def testBuildDir: Directory
+ def allCategories: List[TestCategory]
+ def selectedCategories: List[TestCategory]
+
+ /** Some plausibly abstract types. */
+ type TestBuild <: BuildContributor // e.g. quick, pack
+ type TestCategory <: AbsTestCategory // e.g. pos, neg, run
+ type TestEntity <: AbsTestEntity // e.g. files/pos/test25.scala
+ type TestSequence <: AbsTestSequence // e.g. compile, run, diff
+
+ /** Although TestStep isn't much more than Function1 right now,
+ * it exists this way so it can become more capable.
+ */
+ implicit def f1ToTestStep(f: TestEntity => Boolean): TestStep =
+ new TestStep { def apply(test: TestEntity) = f(test) }
+
+ abstract class TestStep extends (TestEntity => Boolean) {
+ def apply(test: TestEntity): Boolean
+ }
+
+ /** An umbrella category of tests, such as "pos" or "run".
+ */
+ trait AbsTestCategory extends BuildContributor {
+ type TestSettings
+
+ def kind: String
+ def testSequence: TestSequence
+ def denotesTest(location: Path): Boolean
+
+ def createTest(location: Path): TestEntity
+ def createSettings(entity: TestEntity): TestSettings
+ def enumerate: List[TestEntity]
+ }
+
+ /** A single test. It may involve multiple files, but only a
+ * single path is used to designate it.
+ */
+ trait AbsTestEntity extends BuildContributor {
+ def category: TestCategory
+ def location: Path
+ def onException(x: Throwable): Unit
+ def testClasspath: String
+
+ /** Any preconditions before running the test. Test fails
+ * immediately if this returns false.
+ */
+ def precondition: Boolean = true
+
+ /** Most tests will use the sequence defined by the category,
+ * but the test can override and define a custom sequence.
+ */
+ def testSequence: TestSequence
+
+ /** True if this test recognizes the given path as a piece of it.
+ * For validation purposes.
+ */
+ def acknowledges(path: Path): Boolean
+ }
+
+ /** Every TestEntity is partly characterized by a series of actions
+ * which are applied to the TestEntity in the given order. The test
+ * passes if all those actions return true, fails otherwise.
+ */
+ trait AbsTestSequence {
+ def actions: List[TestStep]
+ }
+} \ No newline at end of file
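A standalone sketch of the wrapper pattern behind f1ToTestStep above, using plain String predicates instead of TestEntity; every name here is hypothetical.

// A plain predicate is implicitly lifted into a small Step class, which can
// later grow extra behaviour (naming, tracing) without touching call sites.
object TestStepSketch {
  abstract class Step extends (String => Boolean) {
    def apply(path: String): Boolean
  }

  implicit def f1ToStep(f: String => Boolean): Step =
    new Step { def apply(path: String) = f(path) }

  def runAll(steps: List[Step], path: String): Boolean =
    steps forall (_ apply path)

  def main(args: Array[String]): Unit = {
    val steps: List[Step] = List(
      (p: String) => p.nonEmpty,              // lifted by f1ToStep
      (p: String) => p endsWith ".scala"
    )
    println(runAll(steps, "files/pos/test25.scala"))   // true
  }
}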
diff --git a/src/partest/scala/tools/partest/ant/JavaTask.scala b/src/partest/scala/tools/partest/ant/JavaTask.scala
new file mode 100644
index 0000000000..0bebf91368
--- /dev/null
+++ b/src/partest/scala/tools/partest/ant/JavaTask.scala
@@ -0,0 +1,55 @@
+/* __ *\
+** ________ ___ / / ___ Scala Parallel Testing **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.tools
+package partest
+package ant
+
+import org.apache.tools.ant.Task
+import org.apache.tools.ant.taskdefs.Java
+import org.apache.tools.ant.types.{ EnumeratedAttribute, Commandline, Environment, PropertySet }
+
+import scala.tools.nsc.io._
+import scala.tools.nsc.util.{ ClassPath, CommandLineSpec }
+import CommandLineSpec._
+
+class JavaTask extends Java {
+ override def getTaskName() = "partest"
+ private val scalaRunnerClass = "scala.tools.nsc.MainGenericRunner"
+
+ protected def rootDir = prop("partest.rootdir") getOrElse (baseDir / "test").path
+ protected def partestJVMArgs = prop("partest.jvm.args") getOrElse "-Xms64M -Xmx768M -Xss768K -XX:MaxPermSize=96M"
+ protected def runnerArgs = List("-usejavacp", "scala.tools.partest.Runner", "--javaopts", partestJVMArgs)
+
+ private def baseDir = Directory(getProject.getBaseDir)
+ private def prop(s: String) = Option(getProject getProperty s)
+ private def jvmline(s: String) = returning(createJvmarg())(_ setLine s)
+ private def addArg(s: String) = returning(createArg())(_ setValue s)
+
+ private def newKeyValue(key: String, value: String) =
+ returning(new Environment.Variable)(x => { x setKey key ; x setValue value })
+
+ def setDefaults() {
+ setFork(true)
+ setFailonerror(true)
+ getProject.setSystemProperties()
+ setClassname(scalaRunnerClass)
+ addSysproperty(newKeyValue("partest.is-in-ant", "true"))
+ jvmline(partestJVMArgs)
+ runnerArgs foreach addArg
+
+ // do we want basedir or rootDir to be the cwd?
+ // setDir(Path(rootDir).jfile)
+ }
+
+ override def init() = {
+ super.init()
+ setDefaults()
+ }
+}
+
diff --git a/src/partest/scala/tools/partest/ant/PartestTask.scala b/src/partest/scala/tools/partest/ant/PartestTask.scala
new file mode 100644
index 0000000000..65848fabb0
--- /dev/null
+++ b/src/partest/scala/tools/partest/ant/PartestTask.scala
@@ -0,0 +1,90 @@
+/* __ *\
+** ________ ___ / / ___ Scala Parallel Testing **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+/**** Note -- this isn't used anymore, but I left it in for the moment. ****/
+
+package scala.tools
+package partest
+package ant
+
+import java.io.{ File => JFile }
+
+import org.apache.tools.ant.Task
+import org.apache.tools.ant.types.{ Reference, FileSet }
+
+import scala.reflect.BeanProperty
+import scala.tools.ant.sabbus.CompilationPathProperty
+import scala.tools.nsc.io
+import scala.tools.nsc.util.CommandLineSpec._
+
+class PartestTask extends Task with CompilationPathProperty {
+ /** Used only in ant task */
+ @BeanProperty protected var errorOnFailed: Boolean = _
+ @BeanProperty protected var jUnitReportDir: JFile = _
+
+ /** Propagated to partest run via system properties */
+ @BeanProperty protected var debug: Boolean = _
+ @BeanProperty protected var javaOpts: String = _
+ @BeanProperty protected var partestOpts: String = _
+ @BeanProperty protected var runSets: String = _
+ @BeanProperty protected var scalacOpts: String = _
+ @BeanProperty protected var showDiff: Boolean = _
+ @BeanProperty protected var showLog: Boolean = _
+ @BeanProperty protected var srcDir: String = _
+ @BeanProperty protected var timeout: Int = _
+
+ /** Translating ant information into command line arguments. */
+ private def notEmpty(s: String) = s != null && s.length > 0
+ private def quoted(s: String) = if (s exists (_.isWhitespace)) "\"" + s.trim + "\"" else s
+ private def optionCollection = List[(Boolean, () => List[String])](
+ debug -> (() => List("--debug")),
+ showLog -> (() => List("--show-log")),
+ showDiff -> (() => List("--show-diff")),
+ (timeout > 0) -> (() => List("--timeout", timeout.toString)),
+ notEmpty(javaOpts) -> (() => List("--javaopts", javaOpts)),
+ notEmpty(scalacOpts) -> (() => List("--scalacopts", scalacOpts)),
+ notEmpty(srcDir) -> (() => List("--srcdir", srcDir)),
+ notEmpty(partestOpts) -> (() => toArgs(partestOpts))
+ )
+
+ private def antPropOrNone(name: String) = Option(getProject getProperty name)
+ private def antPropsToCommandLine() = {
+ setProp("partest.isInAnt", "true")
+ val partestDir = antPropOrNone("partest.dir") getOrElse error("Mandatory attribute 'partest.dir' is not set.")
+
+ val root = List("--rootdir", io.Path(partestDir).path)
+ val opts = optionCollection collect { case (true, f) => f() } flatten
+ val sets = Option(runSets).toList flatMap toArgs map toOpt
+
+ root ++ opts ++ sets
+ }
+ private def antRunTests() = {
+ val args = antPropsToCommandLine()
+ val runner = Partest(args: _*)
+ import runner._
+
+ normal("Ant options translate to command line: partest " + fromArgs(args))
+ printConfigBanner()
+
+ val result = launchTestSuite()
+ val msg = result.toString
+
+ if (result.hasFailures && errorOnFailed) error(msg)
+ else log(msg)
+ }
+
+ override def execute() {
+ try antRunTests()
+ catch {
+ case x =>
+ System.err.println("Uncaught exception %s in partest ant ask: aborting." format x)
+ x.printStackTrace()
+ throw x
+ }
+ }
+}
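A standalone sketch of the flag-collection idea behind optionCollection and antPropsToCommandLine above; the parameters and flags are illustrative only.

// Each entry pairs a condition with the arguments it contributes; only the
// entries whose condition holds end up on the assembled command line.
object OptionCollectionSketch {
  def commandLine(debug: Boolean, timeout: Int, srcDir: String): List[String] = {
    def notEmpty(s: String) = s != null && s.length > 0

    val options = List[(Boolean, () => List[String])](
      debug            -> (() => List("--debug")),
      (timeout > 0)    -> (() => List("--timeout", timeout.toString)),
      notEmpty(srcDir) -> (() => List("--srcdir", srcDir))
    )
    options.collect { case (true, f) => f() }.flatten
  }

  def main(args: Array[String]): Unit =
    println(commandLine(debug = true, timeout = 0, srcDir = "files"))   // List(--debug, --srcdir, files)
}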
diff --git a/src/partest/scala/tools/partest/antlib.xml b/src/partest/scala/tools/partest/antlib.xml
index b3b98e853f..af36f11368 100644
--- a/src/partest/scala/tools/partest/antlib.xml
+++ b/src/partest/scala/tools/partest/antlib.xml
@@ -1,4 +1,3 @@
<antlib>
- <taskdef name="partest"
- classname="scala.tools.partest.PartestTask"/>
+ <taskdef name="partest" classname="scala.tools.partest.ant.JavaTask"/>
</antlib>
diff --git a/src/partest/scala/tools/partest/category/AllCategories.scala b/src/partest/scala/tools/partest/category/AllCategories.scala
new file mode 100644
index 0000000000..ce6573123a
--- /dev/null
+++ b/src/partest/scala/tools/partest/category/AllCategories.scala
@@ -0,0 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala Parallel Testing **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.tools
+package partest
+package category
+
+trait AllCategories extends Compiler with Analysis with Runner {
+ self: Universe =>
+
+ object Pos extends DirBasedCategory("pos") { lazy val testSequence: TestSequence = List(compile) }
+ object Neg extends DirBasedCategory("neg") { lazy val testSequence: TestSequence = List(not(compile), diff) }
+ object Run extends DirBasedCategory("run") { lazy val testSequence: TestSequence = List(compile, run, diff) }
+ object Jvm extends DirBasedCategory("jvm") { lazy val testSequence: TestSequence = List(compile, run, diff) }
+}
diff --git a/src/partest/scala/tools/partest/category/Analysis.scala b/src/partest/scala/tools/partest/category/Analysis.scala
new file mode 100644
index 0000000000..f2b43ebf6d
--- /dev/null
+++ b/src/partest/scala/tools/partest/category/Analysis.scala
@@ -0,0 +1,65 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+package category
+
+import java.lang.{ ClassLoader => JavaClassLoader }
+import java.net.URL
+import nsc.util.ScalaClassLoader
+import nsc.io._
+
+class PartestClassLoader(urls: Array[URL], parent: JavaClassLoader) extends ScalaClassLoader.URLClassLoader(urls, parent) {
+ def this(urls: Array[URL]) = this(urls, null)
+ def bytes(path: String) = findBytesForClassName(path)
+ def singleton(path: String) = tryToInitializeClass(path).get getField "MODULE$" get null
+
+ /** Calls a method in an object via reflection.
+ */
+ def apply[T](className: String, methodName: String)(args: Any*): T = {
+ def fail = error("Reflection failed on %s.%s".format(className, methodName))
+ val clazz = tryToLoadClass(className) getOrElse fail
+ val obj = singleton(className)
+ val m = clazz.getMethods find (x => x.getName == methodName && x.getParameterTypes.size == args.size) getOrElse fail
+
+ m.invoke(obj, args map (_.asInstanceOf[AnyRef]): _*).asInstanceOf[T]
+ }
+}
+
+trait Analysis {
+ self: Universe =>
+
+ object Scalap extends DirBasedCategory("scalap") {
+ val testSequence: TestSequence = List(compile, run, diff)
+ override def denotesTest(p: Path) = p.isDirectory && (p.toDirectory.files exists (_.name == "result.test"))
+ override def createTest(location: Path) = new ScalapTest(location)
+
+ class ScalapTest(val location: Path) extends TestEntity {
+ val category = Scalap
+ val scalapMain = "scala.tools.scalap.Main$"
+ val scalapMethod = "decompileScala"
+
+ override def classpathPaths = super.classpathPaths :+ build.scalap
+ override def checkFile = File(location / "result.test")
+ override def precondition = checkFile.isFile && super.precondition
+
+ private def runnerURLs = build.classpathPaths ::: classpathPaths map (_.toURL)
+ private def createClassLoader = new PartestClassLoader(runnerURLs.toArray, this.getClass.getClassLoader)
+
+ val isPackageObject = containsString("package object")
+ val suffix = if (isPackageObject) ".package" else ""
+ val className = location.name.capitalize + suffix
+
+ override def run() = loggingResult {
+ def loader = createClassLoader
+ def bytes = loader.bytes(className)
+
+ trace("scalap %s".format(className))
+ if (isDryRun) ""
+ else loader[String](scalapMain, scalapMethod)(bytes, isPackageObject)
+ }
+ }
+ }
+}
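A standalone sketch of the reflective singleton call that PartestClassLoader.apply performs above, using plain JDK reflection on a local object instead of a separately loaded class; Greeter and callSingleton are hypothetical names.

// Stands in for a class such as scala.tools.scalap.Main that we do not want
// a compile-time dependency on.
object Greeter {
  def greet(name: String): String = "hello, " + name
}

object ReflectiveCallSketch {
  // Loads the object's class, fetches the MODULE$ singleton, finds a method
  // by name and arity, and invokes it reflectively.
  def callSingleton[T](className: String, methodName: String)(args: Any*): T = {
    val clazz    = Class.forName(className + "$")        // object classes end in $
    val instance = clazz.getField("MODULE$").get(null)   // the singleton value
    val method   = clazz.getMethods
      .find(m => m.getName == methodName && m.getParameterTypes.length == args.length)
      .getOrElse(throw new NoSuchMethodException(className + "." + methodName))

    method.invoke(instance, args.map(_.asInstanceOf[AnyRef]): _*).asInstanceOf[T]
  }

  def main(args: Array[String]): Unit =
    println(callSingleton[String]("Greeter", "greet")("partest"))   // hello, partest
}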
diff --git a/src/partest/scala/tools/partest/category/Compiler.scala b/src/partest/scala/tools/partest/category/Compiler.scala
new file mode 100644
index 0000000000..58fd8230e2
--- /dev/null
+++ b/src/partest/scala/tools/partest/category/Compiler.scala
@@ -0,0 +1,142 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+package category
+
+import nsc.io._
+import nsc.reporters._
+import nsc.{ Settings, CompilerCommand }
+import scala.tools.nsc.interactive.RefinedBuildManager
+import util.copyPath
+
+trait Compiler {
+ self: Universe =>
+
+ /** Resident Compiler.
+ * $SCALAC -d dir.obj -Xresident -sourcepath . "$@"
+ */
+ object Res extends DirBasedCategory("res") {
+ lazy val testSequence: TestSequence = List(compile, diff)
+
+ override def denotesTest(p: Path) = p.isDirectory && resFile(p).isFile
+ override def createTest(location: Path) = new ResidentTest(location.toDirectory)
+
+ override def createSettings(entity: TestEntity): TestSettings =
+ returning(super.createSettings(entity)) { settings =>
+ settings.resident.value = true
+ settings.sourcepath.value = entity.sourcesDir.path
+ }
+
+ class ResidentTest(val location: Directory) extends TestEntity {
+ val category = Res
+ override def precondition = checkFile.isFile && super.precondition
+ override def sourcesDir = categoryDir
+
+ override def acknowledges(p: Path) =
+ super.acknowledges(p) || (resFile(location) isSame p)
+
+ private def residentCompilerCommands = safeLines(resFile(location))
+ private def compileResident(global: PartestGlobal, lines: List[String]) = {
+ def printPrompt = global inform "nsc> "
+ val results =
+ lines map { line =>
+ printPrompt
+ trace("compile " + line)
+ isDryRun || global.partestCompile(toArgs(line) map (categoryDir / _ path), false)
+ }
+
+ printPrompt
+
+ /** Note - some res tests are really "neg" style tests, so we can't
+ * use the return value of the compile. The diff catches failures.
+ */
+ true // results forall (_ == true)
+ }
+
+ override def compile() = compileResident(newGlobal(Nil)._1, residentCompilerCommands)
+ }
+ private[Res] def resFile(p: Path) = p.toFile addExtension "res"
+ }
+
+ object BuildManager extends DirBasedCategory("buildmanager") {
+ lazy val testSequence: TestSequence = List(compile, diff)
+ override def denotesTest(p: Path) = p.isDirectory && testFile(p).isFile
+ override def createTest(location: Path) = new BuildManagerTest(location.toDirectory)
+
+ override def createSettings(entity: TestEntity): TestSettings =
+ returning[TestSettings](super.createSettings(entity)) { settings =>
+ settings.Ybuildmanagerdebug.value = true
+ settings.sourcepath.value = entity.sourcesDir.path
+ }
+
+ class PartestBuildManager(settings: Settings, val reporter: ConsoleReporter) extends RefinedBuildManager(settings) {
+ def errorFn(msg: String) = Console println msg
+
+ override protected def newCompiler(newSettings: Settings) =
+ new BuilderGlobal(newSettings, reporter)
+
+ private def filesToSet(pre: String, fs: List[String]): Set[AbstractFile] =
+ fs flatMap (s => Option(AbstractFile getFile (Path(settings.sourcepath.value) / s path))) toSet
+
+ def buildManagerCompile(line: String): Boolean = {
+ val prompt = "builder > "
+ reporter printMessage (prompt + line)
+ val command = new CompilerCommand(toArgs(line), settings)
+ val files = filesToSet(settings.sourcepath.value, command.files)
+
+ update(files, Set.empty)
+ true
+ }
+ }
+
+ private[BuildManager] def testFile(p: Path) = (p / p.name addExtension "test").toFile
+
+ class BuildManagerTest(val location: Directory) extends TestEntity {
+ val category = BuildManager
+
+ override def sourcesDir = outDir
+ override def sourceFiles = Path onlyFiles (location walkFilter (_ != changesDir) filter isJavaOrScala toList)
+ override def checkFile = File(location / location.name addExtension "check")
+ override def precondition = checkFile.isFile && super.precondition
+
+ override def acknowledges(p: Path) = super.acknowledges(p) || (p isSame testFile(location))
+
+ def buildManagerCommands = safeLines(testFile(location))
+ def changesDir = Directory(location / (location.name + ".changes"))
+
+ override def compile() = {
+ val settings = createSettings(this)
+ val pbm = new PartestBuildManager(settings, newReporter(settings))
+
+ // copy files
+ for (source <- sourceFiles) {
+ val target = outDir / (location.normalize relativize source)
+ copyPath(source, target.toFile)
+ }
+
+ def runUpdate(line: String) = {
+ val Array(srcName, replacement) = line split "=>"
+ copyPath(File(changesDir / replacement), File(outDir / srcName))
+ }
+
+ def sendCommand(line: String): Boolean = {
+ val compileRegex = """^>>compile (.*)$""".r
+ val updateRegex = """^>>update\s+(.*)""".r
+ trace(line drop 2)
+
+ isDryRun || (line match {
+ case compileRegex(xs) => pbm.buildManagerCompile(xs)
+ case updateRegex(line) => runUpdate(line)
+ })
+ }
+
+ // send each line to the build manager
+ buildManagerCommands forall sendCommand
+ }
+ }
+ }
+}
+
diff --git a/src/partest/scala/tools/partest/category/Runner.scala b/src/partest/scala/tools/partest/category/Runner.scala
new file mode 100644
index 0000000000..a7713d7dbe
--- /dev/null
+++ b/src/partest/scala/tools/partest/category/Runner.scala
@@ -0,0 +1,108 @@
+/* __ *\
+** ________ ___ / / ___ Scala Parallel Testing **
+** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.tools
+package partest
+package category
+
+import nsc.io._
+
+trait Runner {
+ self: Universe =>
+
+ /** Shootout.
+ */
+ object Shootout extends DirBasedCategory("shootout") {
+ lazy val testSequence: TestSequence = List(compile, run, diff)
+
+ override def denotesTest(p: Path) = isScala(p) && runner(p).isFile
+ override def createTest(location: Path) = new ShootoutTest(location.toFile)
+
+ class ShootoutTest(val location: File) extends TestEntity {
+ val category = Shootout
+ // The files in shootout are very free form, so acknowledge anything close.
+ override def acknowledges(p: Path) =
+ (p.parent.normalize isSame Shootout.root) && (p.name startsWith label)
+
+ private def generated = File(outDir / "test.scala")
+ private def runnerFile = runner(location)
+ override def sourceFiles = List(generated)
+
+ override def compile() = {
+ trace("generate %s from %s, %s".format(tracePath(generated), tracePath(location), tracePath(runnerFile)))
+ // generate source file (even on dry run, we need the path)
+ generated.writeAll(location.slurp(), runnerFile.slurp())
+
+ // compile generated file
+ super.compile()
+ }
+ }
+
+ private[Shootout] def runner(p: Path) = p addExtension "runner" toFile
+ }
+
+ object Scalacheck extends DirBasedCategory("scalacheck") {
+ lazy val testSequence: TestSequence = List(compile, run)
+ override def createTest(location: Path) = new ScalacheckTest(location)
+
+ class ScalacheckTest(val location: Path) extends TestEntity {
+ val category = Scalacheck
+
+ import build.{ scalacheck, forkjoin }
+ import org.scalacheck.Properties
+ import org.scalacheck.Test.{ checkProperties, defaultParams, Result }
+
+ override def classpathPaths = super.classpathPaths ::: List(scalacheck, forkjoin)
+ private def arrayURLs = Array(scalacheck, outDir) map (_.toURL)
+
+ /** For reasons I'm not entirely clear on, I've written all this
+ * to avoid a source dependency on scalacheck.
+ */
+ class ScalacheckClassLoader extends PartestClassLoader(arrayURLs, this.getClass.getClassLoader) {
+ type ScalacheckResult = { def passed: Boolean }
+
+ def propCallback(name: String, passed: Int, discarded: Int): Unit = ()
+ def testCallback(name: String, result: AnyRef): Unit = ()
+
+ val test = singleton("Test$")
+ val params = apply[AnyRef]("org.scalacheck.Test$", "defaultParams")()
+ val result = apply[Seq[(String, AnyRef)]]("org.scalacheck.Test$", "checkProperties")(test, params, propCallback _, testCallback _)
+
+ def allResults() =
+ for ((prop, res) <- result) yield {
+ ScalacheckTest.this.trace("scalacheck result for %s: %s".format(prop, res))
+ res.asInstanceOf[ScalacheckResult].passed
+ }
+
+ def check() = allResults forall (_ == true)
+ }
+
+ override def run() = {
+ trace("scalacheck runs via classloader with: %s".format(arrayURLs mkString ", "))
+ isDryRun || (new ScalacheckClassLoader check)
+ }
+ }
+ }
+
+ object Script extends DirBasedCategory("script") {
+ val testSequence: TestSequence = List(exec, diff)
+ override def createTest(location: Path) = new ScriptTest(location)
+
+ class ScriptTest(val location: Path) extends TestEntity {
+ val category = Script
+ val scriptFile = if (location.isDirectory) location / (label + ".scala") else location
+ val argsFile = withExtension("args").toFile
+ def batFile = scriptFile changeExtension "bat"
+ def script = if (Properties.isWin) batFile else scriptFile
+
+ override def acknowledges(p: Path) = super.acknowledges(p) || (List(argsFile, batFile) exists (_ isSame p))
+ override def execCwd = Some(sourcesDir)
+ override def argumentsToExec = script.path :: safeArgs(argsFile)
+ }
+ }
+} \ No newline at end of file
diff --git a/src/partest/scala/tools/partest/io/ANSIWriter.scala b/src/partest/scala/tools/partest/io/ANSIWriter.scala
new file mode 100644
index 0000000000..0ddcd97a5f
--- /dev/null
+++ b/src/partest/scala/tools/partest/io/ANSIWriter.scala
@@ -0,0 +1,58 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ * @author Philipp Haller
+ */
+
+package scala.tools
+package partest
+package io
+
+import java.io.{ Writer, PrintWriter, OutputStream, OutputStreamWriter }
+
+object ANSIWriter {
+ val NONE = 0
+ val SOME = 1
+ val MANY = 2
+
+ def apply(isAnsi: Boolean) = if (isAnsi) MANY else NONE
+}
+import ANSIWriter._
+
+class ANSIWriter(writer: Writer) extends PrintWriter(writer, true) {
+ def this(out: OutputStream) = this(new OutputStreamWriter(out))
+ def colorful: Int = NONE
+
+ protected val manyColors = List(
+ Console.BOLD + Console.BLACK,
+ Console.BOLD + Console.GREEN,
+ Console.BOLD + Console.RED,
+ Console.BOLD + Console.YELLOW,
+ Console.RESET
+ )
+ protected val someColors = List(
+ Console.BOLD + Console.BLACK,
+ Console.RESET,
+ Console.BOLD + Console.BLACK,
+ Console.BOLD + Console.BLACK,
+ Console.RESET
+ )
+ protected val noColors = List("", "", "", "", "")
+
+ lazy val List(_outline, _success, _failure, _warning, _default) = colorful match {
+ case NONE => noColors
+ case SOME => someColors
+ case MANY => manyColors
+ case _ => noColors
+ }
+
+ private def wrprint(msg: String): Unit = synchronized {
+ print(msg)
+ flush()
+ }
+
+ def outline(msg: String) = wrprint(_outline + msg + _default)
+ def success(msg: String) = wrprint(_success + msg + _default)
+ def failure(msg: String) = wrprint(_failure + msg + _default)
+ def warning(msg: String) = wrprint(_warning + msg + _default)
+ def normal(msg: String) = wrprint(_default + msg)
+}
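A standalone sketch of the palette-selection idea behind ANSIWriter above; the object name and the --ansi flag handling are hypothetical.

// Picks a colour palette once, based on whether ANSI output is wanted,
// then wraps each status marker in the chosen escape codes.
object AnsiSketch {
  val NONE = 0
  val MANY = 2

  def palette(level: Int): (String, String, String) = level match {
    case MANY => (Console.BOLD + Console.GREEN, Console.BOLD + Console.RED, Console.RESET)
    case _    => ("", "", "")
  }

  def main(args: Array[String]): Unit = {
    val (ok, bad, reset) = palette(if (args contains "--ansi") MANY else NONE)
    println(ok + "[  OK  ]" + reset + " files/pos/test1.scala")
    println(bad + "[FAILED]" + reset + " files/neg/bug785")
  }
}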
diff --git a/src/partest/scala/tools/partest/nest/Diff.java b/src/partest/scala/tools/partest/io/Diff.java
index abd09d0293..c7a3d42f30 100644
--- a/src/partest/scala/tools/partest/nest/Diff.java
+++ b/src/partest/scala/tools/partest/io/Diff.java
@@ -1,6 +1,6 @@
// $Id$
-package scala.tools.partest.nest;
+package scala.tools.partest.io;
import java.util.Hashtable;
diff --git a/src/partest/scala/tools/partest/nest/DiffPrint.java b/src/partest/scala/tools/partest/io/DiffPrint.java
index 494bc06e4a..2b2ad93ec7 100644
--- a/src/partest/scala/tools/partest/nest/DiffPrint.java
+++ b/src/partest/scala/tools/partest/io/DiffPrint.java
@@ -1,6 +1,6 @@
// $Id$
-package scala.tools.partest.nest;
+package scala.tools.partest.io;
import java.io.*;
import java.util.Vector;
diff --git a/src/partest/scala/tools/partest/io/JUnitReport.scala b/src/partest/scala/tools/partest/io/JUnitReport.scala
new file mode 100644
index 0000000000..63ae200020
--- /dev/null
+++ b/src/partest/scala/tools/partest/io/JUnitReport.scala
@@ -0,0 +1,38 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+package io
+
+/** This is disabled for the moment but I can fix it up if anyone
+ * is using it.
+ */
+class JUnitReport {
+ // create JUnit Report xml files if directory was specified
+ // def junitReport(dir: Directory) = {
+ // dir.mkdir()
+ // val report = testReport(set.kind, results, succs, fails)
+ // XML.save("%s/%s.xml".format(d.toAbsolute.path, set.kind), report)
+ // }
+
+ // def oneResult(res: (TestEntity, Int)) =
+ // <testcase name={res._1.path}>{
+ // res._2 match {
+ // case 0 => scala.xml.NodeSeq.Empty
+ // case 1 => <failure message="Test failed"/>
+ // case 2 => <failure message="Test timed out"/>
+ // }
+ // }</testcase>
+ //
+ // def testReport(kind: String, results: Iterable[(TestEntity, Int)], succs: Int, fails: Int) = {
+ // <testsuite name={kind} tests={(succs + fails).toString} failures={fails.toString}>
+ // <properties/>
+ // {
+ // results.map(oneResult(_))
+ // }
+ // </testsuite>
+ // }
+ //
+} \ No newline at end of file
diff --git a/src/partest/scala/tools/partest/io/Logging.scala b/src/partest/scala/tools/partest/io/Logging.scala
new file mode 100644
index 0000000000..3d1b0fa0b4
--- /dev/null
+++ b/src/partest/scala/tools/partest/io/Logging.scala
@@ -0,0 +1,132 @@
+package scala.tools
+package partest
+package io
+
+import java.io.{ StringWriter, PrintWriter, Writer }
+import scala.tools.nsc.io._
+import scala.util.control.ControlThrowable
+
+trait Logging {
+ universe: Universe =>
+
+ class PartestANSIWriter extends ANSIWriter(Console.out) {
+ override def colorful: Int = ANSIWriter(universe.isAnsi)
+ private def printIf(cond: Boolean, msg: String) =
+ if (cond) { outline("debug: ") ; println(msg) }
+
+ val verbose = printIf(isVerbose || isDebug, _: String)
+ val debug = printIf(isDebug, _: String)
+ }
+
+ lazy val NestUI = new PartestANSIWriter()
+
+ import NestUI.{ _outline, _success, _failure, _warning, _default }
+
+ def markOutline(msg: String) = _outline + msg + _default
+ def markSuccess(msg: String) = _success + msg + _default
+ def markFailure(msg: String) = _failure + msg + _default
+ def markWarning(msg: String) = _warning + msg + _default
+ def markNormal(msg: String) = _default + msg
+
+ def outline(msg: String) = NestUI outline msg
+ def success(msg: String) = NestUI success msg
+ def failure(msg: String) = NestUI failure msg
+ def warning(msg: String) = NestUI warning msg
+ def normal(msg: String) = NestUI normal msg
+
+ def verbose(msg: String) = NestUI verbose msg
+ def debug(msg: String) = NestUI debug msg
+
+ trait EntityLogging {
+ self: TestEntity =>
+
+ lazy val logWriter = new LogWriter(logFile)
+
+ /** Redirect stdout and stderr to logFile, run body, return result.
+ */
+ def loggingOutAndErr[T](body: => T): T = {
+ val log = logFile.printStream(append = true)
+
+ try Console.withOut(log) {
+ Console.withErr(log) {
+ body
+ }
+ }
+ finally log.close()
+ }
+
+ /** XXX needs attention.
+ */
+ def failureMessage() = safeSlurp(logFile)
+
+ /** For tracing. Outputs a line describing the next action. tracePath
+ * is a path wrapper which prints name or full path depending on verbosity.
+ */
+ def trace(msg: String) = if (isTrace || isDryRun) System.err.println(">> [%s] %s".format(label, msg))
+ def tracePath(path: Path) = if (isVerbose) path.path else path.name
+
+ /** v == verbose.
+ */
+ def vtrace(msg: String) = if (isVerbose) trace(msg)
+
+ /** Run body, writes result to logFile. Any throwable is
+ * caught, stringified, and written to the log.
+ */
+ def loggingResult(body: => String) =
+ try returning(true)(_ => logFile writeAll body)
+ catch {
+ case x: ControlThrowable => throw x
+ case x: Throwable => logException(x)
+ }
+
+ def throwableToString(x: Throwable): String = {
+ val w = new StringWriter
+ x.printStackTrace(new PrintWriter(w))
+ w.toString
+ }
+
+ def warnAndLogException(msg: String, ex: Throwable) = {
+ val str = msg + throwableToString(ex)
+ warning(toStringTrunc(str, 800))
+ logWriter append str
+ }
+
+ def deleteLog(force: Boolean = false) =
+ if (universe.isNoCleanup && !force) debug("Not cleaning up " + logFile)
+ else logFile.deleteIfExists()
+
+ def onException(x: Throwable) { logException(x) }
+ def logException(x: Throwable) = {
+ val msg = throwableToString(x)
+ if (!isTerse)
+ normal(msg)
+
+ logWriter append msg
+ false
+ }
+ }
+
+ /** A writer which doesn't create the file until a write comes in.
+ */
+ class LazilyCreatedWriter(log: File) extends Writer {
+ @volatile private var isCreated = false
+ private lazy val underlying = {
+ isCreated = true
+ log.bufferedWriter()
+ }
+
+ def flush() = if (isCreated) underlying.flush()
+ def close() = if (isCreated) underlying.close()
+ def write(chars: Array[Char], off: Int, len: Int) = {
+ underlying.write(chars, off, len)
+ underlying.flush()
+ }
+ }
+
+ class LogWriter(log: File) extends PrintWriter(new LazilyCreatedWriter(log), true) {
+ override def print(s: String) = {
+ super.print(s)
+ flush()
+ }
+ }
+} \ No newline at end of file
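A standalone sketch of the lazy-creation idea behind LazilyCreatedWriter above: the file only comes into existence on the first write. The class and demo names are hypothetical.

import java.io.{ BufferedWriter, FileWriter, Writer }

// The underlying file is only created when the first write arrives, so a
// test that never logs anything leaves no empty log file behind.
class LazyFileWriter(path: String) extends Writer {
  @volatile private var isCreated = false
  private lazy val underlying = {
    isCreated = true
    new BufferedWriter(new FileWriter(path))
  }

  def write(chars: Array[Char], off: Int, len: Int): Unit = {
    underlying.write(chars, off, len)
    underlying.flush()
  }
  def flush(): Unit = if (isCreated) underlying.flush()
  def close(): Unit = if (isCreated) underlying.close()
}

object LazyFileWriterDemo {
  def main(args: Array[String]): Unit = {
    val w = new LazyFileWriter("demo.log")
    w.close()   // nothing was written, so demo.log is never created
  }
}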
diff --git a/src/partest/scala/tools/partest/nest/AntRunner.scala b/src/partest/scala/tools/partest/nest/AntRunner.scala
deleted file mode 100644
index cdc6961d3d..0000000000
--- a/src/partest/scala/tools/partest/nest/AntRunner.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.File
-
-class AntRunner extends DirectRunner {
-
- val fileManager = new FileManager {
- var JAVACMD: String = "java"
- var JAVAC_CMD: String = "javac"
- var CLASSPATH: String = _
- var EXT_CLASSPATH: String = _
- var LATEST_LIB: String = _
- val TESTROOT: String = ""
- }
-
- def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String): Int = {
- val (succs, fails) = runTestsForFiles(kindFiles.toList, kind)
- succs << 16 | fails
- }
-
-}
diff --git a/src/partest/scala/tools/partest/nest/CompileManager.scala b/src/partest/scala/tools/partest/nest/CompileManager.scala
deleted file mode 100644
index b67653d900..0000000000
--- a/src/partest/scala/tools/partest/nest/CompileManager.scala
+++ /dev/null
@@ -1,218 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2010 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import scala.tools.nsc.{Global, Settings, CompilerCommand, FatalError}
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-
-import java.io.{File, BufferedReader, PrintWriter, FileReader, FileWriter, StringWriter}
-
-class ExtConsoleReporter(override val settings: Settings, reader: BufferedReader, var writer: PrintWriter) extends ConsoleReporter(settings, reader, writer) {
- def this(settings: Settings) = {
- this(settings, Console.in, new PrintWriter(new FileWriter("/dev/null")))
- }
- def hasWarnings: Boolean = WARNING.count != 0
-}
-
-abstract class SimpleCompiler {
- def compile(out: Option[File], files: List[File], kind: String, log: File): Boolean
-}
-
-class TestSettings(fileMan: FileManager) extends {
- override val bootclasspathDefault =
- System.getProperty("sun.boot.class.path", "") + File.pathSeparator +
- fileMan.LATEST_LIB
- override val extdirsDefault =
- System.getProperty("java.ext.dirs", "")
-} with Settings(x => ())
-
-class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
- def newGlobal(settings: Settings, reporter: Reporter): Global =
- new Global(settings, reporter)
-
- def newGlobal(settings: Settings, logWriter: FileWriter): Global = {
- val rep = new ExtConsoleReporter(settings,
- Console.in,
- new PrintWriter(logWriter))
- rep.shortname = true
- newGlobal(settings, rep)
- }
-
- def newSettings = {
- val settings = new TestSettings(fileManager)
- settings.deprecation.value = true
- settings.nowarnings.value = false
- settings.encoding.value = "iso-8859-1"
- settings
- }
-
- def newReporter(sett: Settings) = new ExtConsoleReporter(sett,
- Console.in,
- new PrintWriter(new StringWriter))
-
- private def updatePluginPath(options: String): String = {
- val (opt1, opt2) =
- (options split "\\s").toList partition (_ startsWith "-Xplugin:")
- (opt2 mkString " ")+(
- if (opt1.isEmpty) ""
- else {
- def absolutize(path: String): List[String] = {
- val args = (path substring 9 split File.pathSeparator).toList
- val plugins = args map (arg =>
- if (new File(arg).isAbsolute) arg
- else fileManager.TESTROOT+File.separator+arg
- )
- plugins
- }
- " -Xplugin:"+((opt1 flatMap absolutize) mkString File.pathSeparator)
- }
- )
- }
-
- def compile(out: Option[File], files: List[File], kind: String, log: File): Boolean = {
- val testSettings = newSettings
- val logWriter = new FileWriter(log)
-
- // check whether there is a ".flags" file
- val testBase = {
- val logBase = fileManager.basename(log.getName)
- logBase.substring(0, logBase.length-4)
- }
- val argsFile = new File(log.getParentFile, testBase+".flags")
- val argString = if (argsFile.exists) {
- val fileReader = new FileReader(argsFile)
- val reader = new BufferedReader(fileReader)
- val options = updatePluginPath(reader.readLine())
- reader.close()
- options
- } else ""
- val allOpts = fileManager.SCALAC_OPTS+" "+argString
- NestUI.verbose("scalac options: "+allOpts)
- val args = (allOpts split "\\s").toList
- val command = new CompilerCommand(args, testSettings, x => {}, false)
- val global = newGlobal(command.settings, logWriter)
- val testRep: ExtConsoleReporter = global.reporter.asInstanceOf[ExtConsoleReporter]
-
- val test: TestFile = kind match {
- case "pos" => PosTestFile(files(0), fileManager, out.isEmpty)
- case "neg" => NegTestFile(files(0), fileManager, out.isEmpty)
- case "run" => RunTestFile(files(0), fileManager, out.isEmpty)
- case "jvm" => JvmTestFile(files(0), fileManager, out.isEmpty)
- case "shootout" => ShootoutTestFile(files(0), fileManager, out.isEmpty)
- case "scalap" => ScalapTestFile(files(0), fileManager, out.isEmpty)
- case "scalacheck" =>
- ScalaCheckTestFile(files(0), fileManager, out.isEmpty)
- }
- test.defineSettings(command.settings)
- out match {
- case Some(outDir) =>
- command.settings.outdir.value = outDir.getAbsolutePath
- command.settings.classpath.value = command.settings.classpath.value+
- File.pathSeparator+outDir.getAbsolutePath
- case None =>
- // do nothing
- }
-
- val toCompile = files.map(_.getPath)
- try {
- NestUI.verbose("compiling "+toCompile)
- try {
- (new global.Run) compile toCompile
- } catch {
- case FatalError(msg) =>
- testRep.error(null, "fatal error: " + msg)
- }
- testRep.printSummary
- testRep.writer.flush
- testRep.writer.close
- } catch {
- case e =>
- e.printStackTrace()
- return false
- } finally {
- logWriter.close()
- }
- !testRep.hasErrors
- }
-}
-
-class ReflectiveCompiler(val fileManager: ConsoleFileManager) extends SimpleCompiler {
- import fileManager.{latestCompFile, latestPartestFile}
-
- val sepUrls = Array(latestCompFile.toURI.toURL, latestPartestFile.toURI.toURL)
- //NestUI.verbose("constructing URLClassLoader from URLs "+latestCompFile+" and "+latestPartestFile)
-
- val sepLoader = new java.net.URLClassLoader(sepUrls, null)
-
- val sepCompilerClass =
- sepLoader.loadClass("scala.tools.partest.nest.DirectCompiler")
- val sepCompiler = sepCompilerClass.newInstance()
-
- // needed for reflective invocation
- val fileClass = Class.forName("java.io.File")
- val stringClass = Class.forName("java.lang.String")
- val sepCompileMethod =
- sepCompilerClass.getMethod("compile", fileClass, stringClass)
- val sepCompileMethod2 =
- sepCompilerClass.getMethod("compile", fileClass, stringClass, fileClass)
-
- /* This method throws java.lang.reflect.InvocationTargetException
- * if the compiler crashes.
- * This exception is handled in the shouldCompile and shouldFailCompile
- * methods of class CompileManager.
- */
- def compile(out: Option[File], files: List[File], kind: String, log: File): Boolean = {
- val res = sepCompileMethod2.invoke(sepCompiler, out, files, kind, log).asInstanceOf[java.lang.Boolean]
- res.booleanValue()
- }
-}
-
-class CompileManager(val fileManager: FileManager) {
- var compiler: SimpleCompiler = new /*ReflectiveCompiler*/ DirectCompiler(fileManager)
-
- var numSeparateCompilers = 1
- def createSeparateCompiler() = {
- numSeparateCompilers += 1
- compiler = new /*ReflectiveCompiler*/ DirectCompiler(fileManager)
- }
-
- /* This method returns true iff compilation succeeds.
- */
- def shouldCompile(files: List[File], kind: String, log: File): Boolean = {
- createSeparateCompiler()
- compiler.compile(None, files, kind, log)
- }
-
- /* This method returns true iff compilation succeeds.
- */
- def shouldCompile(out: File, files: List[File], kind: String, log: File): Boolean = {
- createSeparateCompiler()
- compiler.compile(Some(out), files, kind, log)
- }
-
- /* This method returns true iff compilation fails
- * _and_ the compiler does _not_ crash or loop.
- *
- * If the compiler crashes, this method returns false.
- */
- def shouldFailCompile(files: List[File], kind: String, log: File): Boolean = {
- createSeparateCompiler()
- !compiler.compile(None, files, kind, log)
- }
-
- /* This method returns true iff compilation fails
- * _and_ the compiler does _not_ crash or loop.
- *
- * If the compiler crashes, this method returns false.
- */
- def shouldFailCompile(out: File, files: List[File], kind: String, log: File): Boolean = {
- createSeparateCompiler()
- !compiler.compile(Some(out), files, kind, log)
- }
-}
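
The CompileManager contract deleted above (shouldCompile reports a successful compilation, shouldFailCompile reports an expected, non-crashing failure) can be exercised with a small driver. The sketch below is illustrative only, not part of the deleted sources; the test file name and kind are hypothetical, and it assumes it is run from a Scala working copy so that ConsoleFileManager can locate a build.

// Minimal sketch, assuming the CompileManager shown above is on the classpath.
// "files/neg/bug1000.scala" is a placeholder path.
import java.io.File

object CompileManagerSketch {
  def main(args: Array[String]) {
    val fm      = new ConsoleFileManager            // auto-detects the build to test
    val manager = new CompileManager(fm)
    val src     = new File("files/neg/bug1000.scala")
    val log     = fm.getLogFile(src, "neg")
    // For a "neg" test we expect compilation to fail without crashing:
    val ok = manager.shouldFailCompile(List(src), "neg", log)
    println(if (ok) "expected failure observed" else "test did not fail as expected")
  }
}
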
diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
deleted file mode 100644
index 27cd36e3e7..0000000000
--- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
+++ /dev/null
@@ -1,314 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2010 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.{File, FilenameFilter, IOException, StringWriter}
-import java.net.URI
-
-class ConsoleFileManager extends FileManager {
-
- var testBuild = System.getProperty("scalatest.build")
- var testClasses: Option[String] = None
-
- val debug: Boolean =
- (System.getProperty("partest.debug", "false") equals "true") ||
- (System.getProperty("scalatest.debug", "false") equals "true")
-
- def this(buildPath: String, rawClasses: Boolean) = {
- this()
- if (rawClasses)
- testClasses = Some(buildPath)
- else
- testBuild = buildPath
-    // re-run findLatest() because the values set by the primary
-    // constructor must be recomputed for this build path
- findLatest()
- }
-
- def this(buildPath: String) = {
- this(buildPath, false)
- }
-
- def this(buildPath: String, rawClasses: Boolean, moreOpts: String) = {
- this(buildPath, rawClasses)
- SCALAC_OPTS = SCALAC_OPTS+" "+moreOpts
- }
-
- var CLASSPATH = System.getProperty("java.class.path", ".")
- NestUI.verbose("CLASSPATH: "+CLASSPATH)
-
- var JAVACMD = System.getProperty("scalatest.javacmd", "java")
- var JAVAC_CMD = System.getProperty("scalatest.javac_cmd", "javac")
-
- val prefixFile = {
- val cwd = System.getProperty("user.dir")
- if (cwd != null)
- (new File(cwd)).getCanonicalFile
- else
- error("user.dir property not set")
- }
- val PREFIX = prefixFile.getAbsolutePath
-
-/*
-if [ -d "$PREFIX/test" ]; then
- TESTROOT="$PREFIX/test";
-elif [ -d "$PREFIX/misc/scala-test" ]; then
- TESTROOT="$PREFIX/misc/scala-test";
-else
- abort "Test directory not found";
-*/
-
- val testRootFile = {
- val testRootProp = System.getProperty("scalatest.root")
- val testroot =
- if (testRootProp != null)
- new File(testRootProp)
- else {
- // case 1: cwd is `test`
- if (prefixFile.getName == "test" && (new File(prefixFile, "files")).exists)
- prefixFile
- else {
- // case 2: cwd is `test/..`
- val test = new File(prefixFile, "test")
- val scalaTest = new File(new File(prefixFile, "misc"), "scala-test")
- if (test.isDirectory)
- test
- else if (scalaTest.isDirectory)
- scalaTest
- else
- error("Test directory not found")
- }
- }
- testroot.getCanonicalFile
- }
- val TESTROOT = testRootFile.getAbsolutePath
-
- var srcDirName: String = ""
-
- val srcDir: File = {
- val srcDirProp = System.getProperty("partest.srcdir")
- val src =
- if (srcDirProp != null) {
- srcDirName = srcDirProp
- new File(testRootFile, srcDirName)
- } else {
- srcDirName = "files"
- new File(testRootFile, srcDirName)
- }
- if (src.isDirectory)
- src.getCanonicalFile
- else {
- val path = TESTROOT + File.separator + "files"
- NestUI.failure("Source directory \"" + path + "\" not found")
- exit(1)
- }
- }
-
- LIB_DIR = (new File(testRootFile.getParentFile, "lib")).getCanonicalFile.getAbsolutePath
-
- CLASSPATH = CLASSPATH + File.pathSeparator + {
- val libs = new File(srcDir, "lib")
- // add all jars in libs
- (libs.listFiles(new FilenameFilter {
- def accept(dir: File, name: String) = name endsWith ".jar"
- }) map {file => file.getCanonicalFile.getAbsolutePath}).mkString(""+File.pathSeparator)
- }
-
- def findLatest() {
- val testParent = testRootFile.getParentFile
- NestUI.verbose("test parent: "+testParent)
-
- def prefixFileWith(parent: File, relPath: String): File =
- (new File(parent, relPath)).getCanonicalFile
-
- def prefixFile(relPath: String): File =
- prefixFileWith(testParent, relPath)
-
- if (!testClasses.isEmpty) {
- testClassesFile = (new File(testClasses.get)).getCanonicalFile
- NestUI.verbose("Running with classes in "+testClassesFile)
- latestFile = prefixFileWith(testClassesFile.getParentFile, "bin")
- latestLibFile = prefixFileWith(testClassesFile, "library")
- latestActFile = prefixFileWith(testClassesFile, "library")
- latestCompFile = prefixFileWith(testClassesFile, "compiler")
- latestPartestFile = prefixFileWith(testClassesFile, "partest")
- latestFjbgFile = prefixFile("lib/fjbg.jar")
- }
- else if (testBuild != null) {
- testBuildFile = prefixFile(testBuild)
- NestUI.verbose("Running on "+testBuild)
- latestFile = prefixFile(testBuild+"/bin")
- latestLibFile = prefixFile(testBuild+"/lib/scala-library.jar")
- latestActFile = prefixFile(testBuild+"/lib/scala-library.jar")
- latestCompFile = prefixFile(testBuild+"/lib/scala-compiler.jar")
- latestPartestFile = prefixFile(testBuild+"/lib/scala-partest.jar")
- } else {
- def setupQuick() {
- NestUI.verbose("Running build/quick")
- latestFile = prefixFile("build/quick/bin")
- latestLibFile = prefixFile("build/quick/classes/library")
- latestActFile = prefixFile("build/quick/classes/library")
- latestCompFile = prefixFile("build/quick/classes/compiler")
- latestPartestFile = prefixFile("build/quick/classes/partest")
- }
-
- def setupInst() {
- NestUI.verbose("Running dist (installed)")
- val p = testParent.getParentFile
- latestFile = prefixFileWith(p, "bin")
- latestLibFile = prefixFileWith(p, "lib/scala-library.jar")
- latestActFile = prefixFileWith(p, "lib/scala-library.jar")
- latestCompFile = prefixFileWith(p, "lib/scala-compiler.jar")
- latestPartestFile = prefixFileWith(p, "lib/scala-partest.jar")
- }
-
- def setupDist() {
- NestUI.verbose("Running dists/latest")
- latestFile = prefixFile("dists/latest/bin")
- latestLibFile = prefixFile("dists/latest/lib/scala-library.jar")
- latestActFile = prefixFile("dists/latest/lib/scala-library.jar")
- latestCompFile = prefixFile("dists/latest/lib/scala-compiler.jar")
- latestPartestFile = prefixFile("dists/latest/lib/scala-partest.jar")
- }
-
- def setupPack() {
- NestUI.verbose("Running build/pack")
- latestFile = prefixFile("build/pack/bin")
- latestLibFile = prefixFile("build/pack/lib/scala-library.jar")
- latestActFile = prefixFile("build/pack/lib/scala-library.jar")
- latestCompFile = prefixFile("build/pack/lib/scala-compiler.jar")
- latestPartestFile = prefixFile("build/pack/lib/scala-partest.jar")
- }
-
- def max(a: Long, b: Long) = if (a > b) a else b
-
- val dists = new File(testParent, "dists")
- val build = new File(testParent, "build")
- // in case of an installed dist, testRootFile is one level deeper
- val bin = new File(testParent.getParentFile, "bin")
-
- // detect most recent build
- val quickTime =
- max(prefixFile("build/quick/classes/compiler/compiler.properties").lastModified,
- prefixFile("build/quick/classes/library/library.properties").lastModified)
- val packTime =
- max(prefixFile("build/pack/lib/scala-compiler.jar").lastModified,
- prefixFile("build/pack/lib/scala-library.jar").lastModified)
- val distTime =
- max(prefixFile("dists/latest/lib/scala-compiler.jar").lastModified,
- prefixFile("dists/latest/lib/scala-library.jar").lastModified)
- val instTime = {
- val p = testParent.getParentFile
- max(prefixFileWith(p, "lib/scala-compiler.jar").lastModified,
- prefixFileWith(p, "lib/scala-library.jar").lastModified)
- }
-
- if (quickTime > packTime) { // pack ruled out
- if (quickTime > distTime) { // dist ruled out
- if (quickTime > instTime) // inst ruled out
- setupQuick()
- else
- setupInst()
- } else { // quick ruled out
- if (distTime > instTime) // inst ruled out
- setupDist()
- else
- setupInst()
- }
- } else { // quick ruled out
- if (packTime > distTime) { // dist ruled out
- if (packTime > instTime) // inst ruled out
- setupPack()
- else
- setupInst()
- } else { // pack ruled out
- if (distTime > instTime) // inst ruled out
- setupDist()
- else
- setupInst()
- }
- }
- latestFjbgFile = prefixFile("lib/fjbg.jar")
- }
-
- BIN_DIR = latestFile.getAbsolutePath
- LATEST_LIB = latestLibFile.getAbsolutePath
- LATEST_COMP = latestCompFile.getAbsolutePath
- LATEST_PARTEST = latestPartestFile.getAbsolutePath
-
- // detect whether we are running on Windows
- val osName = System.getProperty("os.name")
- NestUI.verbose("OS: "+osName)
-
- val scalaCommand = if (osName startsWith "Windows")
- "scala.bat" else "scala"
- val scalacCommand = if (osName startsWith "Windows")
- "scalac.bat" else "scalac"
-
- SCALA = (new File(latestFile, scalaCommand)).getAbsolutePath
- SCALAC_CMD = (new File(latestFile, scalacCommand)).getAbsolutePath
- }
-
- var BIN_DIR: String = ""
- var LATEST_LIB: String = ""
- var LATEST_COMP: String = ""
- var LATEST_PARTEST: String = ""
- var SCALA: String = ""
- var SCALAC_CMD: String = ""
-
- var latestFile: File = _
- var latestLibFile: File = _
- var latestActFile: File = _
- var latestCompFile: File = _
- var latestPartestFile: File = _
- var latestFjbgFile: File = _
- var testBuildFile: File = _
- var testClassesFile: File = _
- // initialize above fields
- findLatest()
-
- var testFiles: List[File] = List()
-
- def getFiles(kind: String, doCheck: Boolean, filter: Option[(String, Boolean)]): List[File] = {
- val dir = new File(srcDir, kind)
- NestUI.verbose("look in "+dir+" for tests")
- val files = if (dir.isDirectory) {
- if (!testFiles.isEmpty) {
- val dirpath = dir.getAbsolutePath
- testFiles filter { _.getParentFile.getAbsolutePath == dirpath }
- } else if (doCheck) filter match {
- case Some((ending, enableDirs)) =>
- val filter = new FilenameFilter {
- def accept(dir: File, name: String) =
- name.endsWith(ending) ||
- (enableDirs && (name != ".svn") && (!name.endsWith(".obj")) &&
- (new File(dir, name)).isDirectory)
- }
- dir.listFiles(filter).toList
- case None =>
- val filter = new FilenameFilter {
- def accept(dir: File, name: String) = name != ".svn"
- }
- dir.listFiles(filter).toList
- } else // skip
- Nil
- } else {
- NestUI.failure("Directory \"" + dir.getPath + "\" not found")
- Nil
- }
- if (failed)
- files filter { logFileExists(_, kind) }
- else
- files
- }
-
- def getFiles(kind: String, doCheck: Boolean): List[File] =
- getFiles(kind, doCheck, Some((".scala", true)))
-
-}
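
The nested timestamp comparisons in findLatest above simply select whichever candidate build (quick, pack, dists/latest, or an installed dist) has the most recently modified key artifacts. A compact way to express the same decision rule, ignoring the exact tie-breaking of the nested ifs, is sketched below with placeholder timestamps (not part of the deleted sources; maxBy requires Scala 2.8+).

// Sketch of the selection rule in findLatest: pick the newest of the four candidates.
// The timestamps are placeholders; in the real code they come from lastModified.
object BuildSelectionSketch {
  def main(args: Array[String]) {
    val quickTime = 100L  // build/quick/classes/*.properties
    val packTime  = 200L  // build/pack/lib/*.jar
    val distTime  = 150L  // dists/latest/lib/*.jar
    val instTime  =  50L  // <prefix>/lib/*.jar of an installed dist
    val candidates = List(
      "quick" -> quickTime,
      "pack"  -> packTime,
      "dist"  -> distTime,
      "inst"  -> instTime)
    val (newest, _) = candidates maxBy (_._2)
    println("would run tests against: " + newest)   // prints "pack" for these values
  }
}
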
diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
deleted file mode 100644
index 574cc70762..0000000000
--- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
+++ /dev/null
@@ -1,237 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2010 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.{File, PrintStream, FileOutputStream, BufferedReader,
- InputStreamReader, StringWriter, PrintWriter}
-import utils.Properties._
-
-class ConsoleRunner extends DirectRunner with RunnerUtils {
-
- case class TestSet(loc: String,
- filter: Option[(String, Boolean)],
- kind: String,
- msg: String)
-
- val testSets = {
- val fileFilter = Some((".scala", true))
- List(
- TestSet("pos", fileFilter, "pos",
- "Testing compiler (on files whose compilation should succeed)"),
- TestSet("neg", fileFilter, "neg",
- "Testing compiler (on files whose compilation should fail)"),
- TestSet("run", fileFilter, "run", "Testing JVM backend"),
- TestSet("jvm", fileFilter, "jvm", "Testing JVM backend"),
- TestSet("res", Some((".res", false)), "res",
- "Testing resident compiler"),
- TestSet("shootout", fileFilter, "shootout", "Testing shootout tests"),
- TestSet("script", fileFilter, "script", "Testing script tests"),
- TestSet("scalacheck", fileFilter, "scalacheck", "Testing ScalaCheck tests"),
- TestSet("scalap", fileFilter, "scalap", "Run scalap decompiler tests"))
- }
-
- var fileManager: ConsoleFileManager = _
-
-  private val isJava5 = javaVersion matches "1\\.[567].*"  // true on Java 5, 6, or 7
- private var runAll = false
- private var testFiles: List[File] = List()
- private val errors =
- Integer.parseInt(System.getProperty("scalatest.errors", "0"))
-
- def denotesTestSet(arg: String) =
- testSets exists { set => arg == "--" + set.loc }
-
- def denotesTestFile(arg: String) =
- arg.endsWith(".scala") || arg.endsWith(".res")
-
- def denotesTestDir(arg: String) =
- (new File(arg)).isDirectory
-
- private def printVersion { NestUI outline (versionMsg + "\n") }
-
- def main(argstr: String) {
-    // tokenize args; filterNot is needed because "".split("\\s") yields Array("")
- var args = (argstr split "\\s").toList.filterNot(_ == "")
-
- if (args.length == 0)
- NestUI.usage()
- else {
- // find out which build to test
- val (buildPath, args1) = searchAndRemovePath("--buildpath", args)
- val (classPath, args2) = searchAndRemovePath("--classpath", args1)
- val (srcPath, args3) = searchAndRemovePath("--srcpath", args2)
- args = args3
-
- if (!srcPath.isEmpty)
- System.setProperty("partest.srcdir", srcPath.get)
-
- fileManager =
- if (!buildPath.isEmpty)
- new ConsoleFileManager(buildPath.get)
- else if (!classPath.isEmpty)
- new ConsoleFileManager(classPath.get, true)
- else if (args contains "--pack") {
- args = args.filterNot(_ == "--pack") // will create a result file '--pack' otherwise
- new ConsoleFileManager("build/pack")
- } else // auto detection, see ConsoleFileManager.findLatest
- new ConsoleFileManager
-
- if (!args.exists(denotesTestSet(_)) &&
- !args.exists(denotesTestFile(_)) &&
- !args.exists(denotesTestDir(_)))
- runAll = true
-
- var enabled = List[TestSet]()
- var readTimeout = false
- for (arg <- args) {
- (testSets find { set => arg == "--" + set.loc }) match {
- case Some(set) => enabled = set :: enabled
- case None => arg match {
- case "--all" => runAll = true
- case "--verbose" => NestUI._verbose = true
- case "--show-diff" => fileManager.showDiff = true
- case "--show-log" => fileManager.showLog = true
- case "--failed" => fileManager.failed = true
- case "--version" => printVersion; return
- case "--ansi" => NestUI.initialize(NestUI.MANY)
- case "--timeout" => readTimeout = true
- case s: String if readTimeout =>
- fileManager.timeout = s
- readTimeout = false
- case _ =>
- if (denotesTestFile(arg) || denotesTestDir(arg)) {
- val file = new File(arg)
- if (file.exists) {
- NestUI.verbose("adding test file "+file)
- testFiles = file :: testFiles
- } else {
- NestUI.failure("File \"" + arg + "\" not found\n")
- System.exit(1)
- }
- } else {
- NestUI.failure("Invalid option \""+arg+"\"\n")
- NestUI.usage()
- }
- }
- }
- }
- NestUI.verbose("enabled test sets: "+enabled)
- NestUI.verbose("runAll: "+runAll)
-
- val dir =
- if (!fileManager.testClasses.isEmpty)
- fileManager.testClassesFile
- else if (fileManager.testBuild != null)
- fileManager.testBuildFile
- else
- fileManager.latestCompFile.getParentFile.getParentFile.getCanonicalFile
- NestUI.outline("Scala compiler classes in: "+dir+"\n")
-
- NestUI.outline("Scala version is: "+scala.tools.nsc.Properties.versionMsg+"\n")
- NestUI.outline("Scalac options are: "+fileManager.SCALAC_OPTS+"\n")
-
- val vmBin = javaHome + File.separator + "bin"
- val vmName = "%s (build %s, %s)".format(javaVmName, javaVmVersion, javaVmInfo)
- val vmOpts = fileManager.JAVA_OPTS
-
- NestUI.outline("Java binaries in: "+vmBin+"\n")
- NestUI.outline("Java runtime is: "+vmName+"\n")
- NestUI.outline("Java options are: "+vmOpts+"\n")
- NestUI.outline("Source directory is: "+fileManager.srcDir.getAbsolutePath+"\n")
-
- val start = System.currentTimeMillis
-
- val (successes, failures) = testCheckAll(enabled)
-
- val end = System.currentTimeMillis
- val total = successes + failures
-
- val elapsedSecs = (end - start)/1000
- val elapsedMins = elapsedSecs/60
- val elapsedHrs = elapsedMins/60
- val dispMins = elapsedMins - elapsedHrs * 60
- val dispSecs = elapsedSecs - elapsedMins * 60
- val dispElapsed = {
- def form(num: Long) = if (num < 10) "0"+num else ""+num
- form(elapsedHrs)+":"+form(dispMins)+":"+form(dispSecs)
- }
-
- println
- if (failures == 0)
- NestUI.success("All of "+total+" tests were successful (elapsed time: "+dispElapsed+")\n")
- else
- NestUI.failure(failures+" of "+total+" tests failed (elapsed time: "+dispElapsed+")\n")
-
- if (failures == errors)
- System.exit(0)
- else
- System.exit(1)
- }
- }
-
- def runTests(testSet: TestSet): (Int, Int) = {
- val TestSet(loc, filter, kind, msg) = testSet
- val files = fileManager.getFiles(loc, true, filter)
- if (!files.isEmpty) {
- NestUI.verbose("test files: "+files)
- NestUI.outline("\n"+msg+"\n")
- runTestsForFiles(files, kind)
- } else {
- NestUI.verbose("test dir empty\n")
- (0, 0)
- }
- }
-
- /**
- * @return (success count, failure count)
- */
- def testCheckAll(enabledSets: List[TestSet]): (Int, Int) = {
- def runTestsFiles = if (!testFiles.isEmpty) {
- def absName(f: File): String = f.getAbsoluteFile.getCanonicalPath
-
- def kindOf(f: File): String = {
- val firstName = absName(f)
- val len = fileManager.srcDirName.length
- val filesPos = firstName.indexOf(fileManager.srcDirName)
- if (filesPos == -1) {
- NestUI.failure("invalid test file: "+firstName+"\n")
- Predef.exit(1)
- } else {
- val short = firstName.substring(filesPos+len+1, filesPos+len+1+3)
- val shortKinds = List("pos", "neg", "run", "jvm", "res")
- if (shortKinds contains short) short
- else short match {
- case "sho" => "shootout"
- case "scr" => "script"
- case "sca" => "scalacheck"
- }
- }
- }
-
- val fstKind = kindOf(testFiles.head)
- NestUI.verbose("all test files expected to have kind "+fstKind)
- if (!testFiles.forall(kindOf(_) equals fstKind)) {
- NestUI.failure("test files have different kinds\n")
- Predef.exit(1)
- } else {
- NestUI.outline("\nTesting individual files\n")
- runTestsForFiles(testFiles, fstKind)
- }
- } else (0, 0)
-
- val runSets =
- if (runAll) testSets // run all test sets
- else enabledSets
- NestUI.verbose("run sets: "+runSets)
-
- val results = List(runTestsFiles) ::: (runSets map runTests)
- results reduceLeft { (p: (Int, Int), q: (Int, Int)) =>
- (p._1+q._1, p._2+q._2) }
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/DirectRunner.scala b/src/partest/scala/tools/partest/nest/DirectRunner.scala
deleted file mode 100644
index 7ea74424fc..0000000000
--- a/src/partest/scala/tools/partest/nest/DirectRunner.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2010 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.{File, PrintStream, FileOutputStream, BufferedReader,
- InputStreamReader, StringWriter, PrintWriter}
-import java.util.StringTokenizer
-import scala.tools.nsc.io.Directory
-
-import scala.actors.Actor._
-import scala.actors.TIMEOUT
-
-trait DirectRunner {
-
- def fileManager: FileManager
-
- private val numActors = Integer.parseInt(System.getProperty("scalatest.actors", "8"))
-
- if ((System.getProperty("partest.debug", "false") equals "true") ||
- (System.getProperty("scalatest.debug", "false") equals "true"))
- scala.actors.Debug.level = 3
-
- private val coreProp = try {
- System.getProperty("actors.corePoolSize")
- } catch {
- case ace: java.security.AccessControlException =>
- null
- }
- if (coreProp == null) {
- scala.actors.Debug.info("actors.corePoolSize not defined")
- System.setProperty("actors.corePoolSize", "16")
- }
-
- def runTestsForFiles(kindFiles: List[File], kind: String): (Int, Int) = {
- val len = kindFiles.length
- val (testsEach, lastFrag) = (len/numActors, len%numActors)
- val last = numActors-1
- val workers = for (i <- List.range(0, numActors)) yield {
- val toTest = kindFiles.slice(i*testsEach, (i+1)*testsEach)
- val worker = new Worker(fileManager)
- worker.start()
- if (i == last)
- worker ! RunTests(kind, (kindFiles splitAt (last*testsEach))._2)
- else
- worker ! RunTests(kind, toTest)
- worker
- }
- var succs = 0; var fails = 0
- var logsToDelete: List[File] = List()
- var outdirsToDelete: List[File] = List()
- workers foreach { w =>
- receiveWithin(3600 * 1000) {
- case Results(s, f, logs, outdirs) =>
- logsToDelete = logsToDelete ::: logs.filter(_.toDelete)
- outdirsToDelete = outdirsToDelete ::: outdirs
- succs += s
- fails += f
- case TIMEOUT =>
- // add at least one failure
- NestUI.verbose("worker timed out; adding failed test")
- fails += 1
- }
- }
- for (x <- logsToDelete ::: outdirsToDelete) {
- NestUI.verbose("deleting "+x)
- Directory(x).deleteRecursively()
- }
-
- (succs, fails)
- }
-
-}
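
DirectRunner above divides the test files evenly among the worker actors and hands any remainder to the last one. The slice arithmetic can be seen in isolation in the following sketch (plain lists instead of actors; the file names are placeholders and the sketch is not part of the deleted sources).

// Sketch of the partitioning used in runTestsForFiles: numActors-1 equal slices,
// with the last worker taking everything that is left over.
object PartitionSketch {
  def main(args: Array[String]) {
    val numActors = 3
    val kindFiles = (1 to 10).toList map ("test" + _ + ".scala")   // placeholder names
    val testsEach = kindFiles.length / numActors
    val last      = numActors - 1
    val batches   = for (i <- List.range(0, numActors)) yield
      if (i == last) (kindFiles splitAt (last * testsEach))._2     // 4 remaining files
      else kindFiles.slice(i * testsEach, (i + 1) * testsEach)     // 3 files each
    batches.zipWithIndex foreach { case (b, i) => println("worker " + i + ": " + b) }
  }
}
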
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
deleted file mode 100644
index 637999cc36..0000000000
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2010 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.{File, FilenameFilter, IOException, StringWriter}
-import java.net.URI
-import scala.tools.nsc.io.Directory
-
-trait FileManager {
-
- def basename(name: String): String = {
- val inx = name.lastIndexOf(".")
- if (inx < 0) name else name.substring(0, inx)
- }
-
- def deleteRecursive(dir: File) { Directory(dir).deleteRecursively() }
-
- /**
- * Compares two files using a Java implementation of the GNU diff
- * available at http://www.bmsi.com/java/#diff.
- *
- * @param f1 the first file to be compared
- * @param f2 the second file to be compared
- * @return the text difference between the compared files
- */
- def compareFiles(f1: File, f2: File): String = {
- var res = ""
- try {
- val diffWriter = new StringWriter
- val args = Array(f1.getCanonicalPath(), f2.getCanonicalPath())
- DiffPrint.doDiff(args, diffWriter)
- res = diffWriter.toString
- if (res startsWith "No")
- res = ""
- } catch {
- case e: IOException =>
- e.printStackTrace()
- }
- res
- }
-
- var JAVACMD: String
- var JAVAC_CMD: String
-
- var CLASSPATH: String
- var LATEST_LIB: String
- var LIB_DIR: String = ""
-
- val TESTROOT: String
-
- var showDiff = false
- var showLog = false
- var failed = false
-
- var SCALAC_OPTS = System.getProperty("scalatest.scalac_opts", "-deprecation")
- var JAVA_OPTS = System.getProperty("scalatest.java_opts", "")
-
- var timeout = "1200000"
-
- def getLogFile(dir: File, fileBase: String, kind: String): LogFile =
- new LogFile(dir, fileBase + "-" + kind + ".log")
-
- def getLogFile(file: File, kind: String): LogFile = {
- val dir = file.getParentFile
- val fileBase = basename(file.getName)
- getLogFile(dir, fileBase, kind)
- }
-
- def logFileExists(file: File, kind: String): Boolean = {
- val logFile = getLogFile(file, kind)
- logFile.exists && logFile.canRead
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/NestRunner.scala b/src/partest/scala/tools/partest/nest/NestRunner.scala
deleted file mode 100644
index 158521875e..0000000000
--- a/src/partest/scala/tools/partest/nest/NestRunner.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2010 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-object NestRunner {
- def main(args: Array[String]) {
- val argstr = args.mkString(" ")
- (new ReflectiveRunner).main(argstr)
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/NestUI.scala b/src/partest/scala/tools/partest/nest/NestUI.scala
deleted file mode 100644
index b8d77bc704..0000000000
--- a/src/partest/scala/tools/partest/nest/NestUI.scala
+++ /dev/null
@@ -1,108 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2010 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.PrintWriter
-
-object NestUI {
-
- val NONE = 0
- val SOME = 1
- val MANY = 2
-
- private var _outline = ""
- private var _success = ""
- private var _failure = ""
- private var _warning = ""
- private var _default = ""
-
- def initialize(number: Int) = number match {
- case MANY =>
- _outline = Console.BOLD + Console.BLACK
- _success = Console.BOLD + Console.GREEN
- _failure = Console.BOLD + Console.RED
- _warning = Console.BOLD + Console.YELLOW
- _default = Console.RESET
- case SOME =>
- _outline = Console.BOLD + Console.BLACK
- _success = Console.RESET
- _failure = Console.BOLD + Console.BLACK
- _warning = Console.BOLD + Console.BLACK
- _default = Console.RESET
- case _ =>
- }
-
- def outline(msg: String) = print(_outline + msg + _default)
- def outline(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_outline + msg + _default)
- }
-
- def success(msg: String) = print(_success + msg + _default)
- def success(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_success + msg + _default)
- }
-
- def failure(msg: String) = print(_failure + msg + _default)
- def failure(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_failure + msg + _default)
- }
-
- def warning(msg: String) = print(_warning + msg + _default)
- def warning(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_warning + msg + _default)
- }
-
- def normal(msg: String) = print(_default + msg)
- def normal(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_default + msg)
- }
-
- def usage() {
- println("Usage: NestRunner [<options>] [<testfile> ..] [<resfile>]")
- println(" <testfile>: list of files ending in '.scala'")
- println(" <resfile>: a file not ending in '.scala'")
- println(" <options>:")
- println
- println(" Test categories:")
- println(" --all run all tests")
- println(" --pos run compilation tests (success)")
- println(" --neg run compilation tests (failure)")
- println(" --run run interpreter and backend tests")
- println(" --jvm run JVM backend tests")
- println(" --res run resident compiler tests")
- println(" --script run script runner tests")
- println(" --shootout run shootout tests")
- println
- println(" Other options:")
- println(" --pack pick compiler/library in build/pack, and run all tests")
- println(" --show-log show log")
- println(" --show-diff show diff between log and check file")
- println(" --failed run only those tests that failed during the last run")
- println(" --verbose show progress information")
- println(" --buildpath set (relative) path to build jars")
- println(" ex.: --buildpath build/pack")
- println(" --classpath set (absolute) path to build classes")
- println(" --srcpath set (relative) path to test source files")
- println(" ex.: --srcpath pending")
- println
- println(utils.Properties.versionString)
- println("maintained by Philipp Haller (EPFL)")
- exit(1)
- }
-
- var _verbose = false
-
- def verbose(msg: String) {
- if (_verbose) {
- outline("debug: ")
- println(msg)
- }
- }
-
-}
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
deleted file mode 100644
index bcedaa38be..0000000000
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ /dev/null
@@ -1,80 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2010 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-/* This class is used to load an instance of DirectRunner using
- * a custom class loader.
- * The purpose is to "auto-detect" a good classpath for the
- * rest of the classes (Worker, CompileManager etc.), so that
- * the main NestRunner can be started merely by putting its
- * class on the classpath (ideally).
- */
-class ReflectiveRunner extends RunnerUtils {
- // TODO: we might also use fileManager.CLASSPATH
- // to use the same classes as used by `scala` that
- // was used to start the runner.
-
- import java.net.URLClassLoader
- import java.io.File.pathSeparator
- import utils.Properties.{ sysprop, syspropset }
-
- val sepRunnerClassName = "scala.tools.partest.nest.ConsoleRunner"
-
- def main(args: String) {
- val argList = (args.split("\\s")).toList
-
- // find out which build to test
- val buildPath = searchPath("--buildpath", argList)
- val classPath = searchPath("--classpath", argList)
- val fileManager =
- if (!buildPath.isEmpty)
- new ConsoleFileManager(buildPath.get)
- else if (!classPath.isEmpty)
- new ConsoleFileManager(classPath.get, true)
- else if (argList contains "--pack")
- new ConsoleFileManager("build/pack")
- else // auto detection
- new ConsoleFileManager
-
- import fileManager.
- { latestCompFile, latestLibFile, latestActFile, latestPartestFile, latestFjbgFile }
- val files =
- Array(latestCompFile, latestLibFile, latestActFile, latestPartestFile, latestFjbgFile)
-
- val sepUrls = files map { _.toURI.toURL }
- val sepLoader = new URLClassLoader(sepUrls, null)
-
- if (fileManager.debug)
- println("Loading classes from:\n" + sepUrls.mkString("\n"))
-
- val paths = (if (classPath.isEmpty) files.slice(0, 4) else files) map { _.getPath }
- val newClasspath = paths mkString pathSeparator
-
- syspropset("java.class.path", newClasspath)
- syspropset("scala.home", "")
-
- if (fileManager.debug)
- for (prop <- List("java.class.path", "sun.boot.class.path", "java.ext.dirs"))
- println(prop + ": " + sysprop(prop))
-
- try {
- val sepRunnerClass = sepLoader loadClass sepRunnerClassName
- val sepRunner = sepRunnerClass.newInstance()
- val sepMainMethod = sepRunnerClass.getMethod("main", Array(classOf[String]): _*)
- val cargs: Array[AnyRef] = Array(args)
- sepMainMethod.invoke(sepRunner, cargs: _*)
- }
- catch {
- case cnfe: ClassNotFoundException =>
- cnfe.printStackTrace()
- NestUI.failure(sepRunnerClassName +" could not be loaded from:\n")
- sepUrls foreach (x => NestUI.failure(x + "\n"))
- }
- }
-}
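
The class-loading trick used by ReflectiveRunner above, an isolated URLClassLoader with a null parent plus a reflective call to an instance's main(String), is a generic pattern. A stripped-down sketch of the same shape follows; the jar path and class name are hypothetical and the sketch is not part of the deleted sources.

// Sketch of the isolated-classloader pattern used above. "some.jar" and
// "example.Tool" are placeholders, not artifacts referenced by partest.
import java.io.File
import java.net.URLClassLoader

object IsolatedLoaderSketch {
  def main(args: Array[String]) {
    val urls   = Array(new File("some.jar").toURI.toURL)
    val loader = new URLClassLoader(urls, null)        // null parent: ignore our classpath
    val clazz  = loader loadClass "example.Tool"
    val inst   = clazz.newInstance()
    val m      = clazz.getMethod("main", classOf[String])
    m.invoke(inst, "--verbose")                        // same call shape as ReflectiveRunner
  }
}
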
diff --git a/src/partest/scala/tools/partest/nest/RunnerUtils.scala b/src/partest/scala/tools/partest/nest/RunnerUtils.scala
deleted file mode 100644
index 4e41d00bf1..0000000000
--- a/src/partest/scala/tools/partest/nest/RunnerUtils.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2010 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-trait RunnerUtils {
-
- def searchPath(option: String, as: List[String]): Option[String] = {
- val Option = option
- as match {
- case Option :: r :: rs => Some(r)
- case other :: rest => searchPath(option, rest)
- case List() => None
- }
- }
-
- def searchAndRemovePath(option: String, as: List[String]): (Option[String], List[String]) = {
- val Option = option
- def search(before: List[String], after: List[String]): (Option[String], List[String]) = after match {
- case Option :: r :: rs => (Some(r), before ::: rs)
- case other :: rest => search(before ::: List(other), rest)
- case List() => (None, before)
- }
- search(List(), as)
- }
-
- def searchAndRemoveOption(option: String, as: List[String]): (Boolean, List[String]) = {
- val Option = option
- def search(before: List[String], after: List[String]): (Boolean, List[String]) = after match {
- case Option :: rest => (true, before ::: rest)
- case other :: rest => search(before ::: List(other), rest)
- case List() => (false, before)
- }
- search(List(), as)
- }
-
-}
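
The `val Option = option` line in each method above is the stable-identifier trick: a capitalized val used in a pattern matches by equality instead of binding a fresh variable. A self-contained sketch of the same idiom (argument values are arbitrary; not part of the deleted sources):

// Sketch of the stable-identifier pattern used by searchPath and friends.
object StableIdSketch {
  def find(option: String, as: List[String]): Option[String] = {
    val Option = option                 // capitalized, so it matches by equality below
    as match {
      case Option :: value :: _ => Some(value)
      case _ :: rest            => find(option, rest)
      case Nil                  => None
    }
  }
  def main(args: Array[String]) {
    println(find("--buildpath", List("--verbose", "--buildpath", "build/pack")))
    // prints Some(build/pack)
  }
}
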
diff --git a/src/partest/scala/tools/partest/nest/StreamAppender.scala b/src/partest/scala/tools/partest/nest/StreamAppender.scala
deleted file mode 100644
index c4636af323..0000000000
--- a/src/partest/scala/tools/partest/nest/StreamAppender.scala
+++ /dev/null
@@ -1,90 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2010 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.{Writer, PrintWriter, Reader, BufferedReader,
- IOException, InputStream, StringWriter, InputStreamReader,
- OutputStreamWriter, StringReader, OutputStream}
-
-object StreamAppender {
-
- def apply(reader: BufferedReader, writer: Writer): StreamAppender = {
- val pwriter = new PrintWriter(writer, true)
- new StreamAppender(reader, pwriter)
- }
-
- def apply(reader: Reader, writer: Writer): StreamAppender =
- apply(new BufferedReader(reader), writer)
-
- def appendToString(in1: InputStream, in2: InputStream): String = {
- val swriter1 = new StringWriter
- val swriter2 = new StringWriter
- val reader1 = new BufferedReader(new InputStreamReader(in1))
- val reader2 = new BufferedReader(new InputStreamReader(in2))
- val app1 = StreamAppender(reader1, swriter1)
- val app2 = StreamAppender(reader2, swriter2)
-
- val async = new Thread(app2)
- async.start()
- app1.run()
- async.join()
- swriter1.toString + swriter2.toString
- }
-/*
- private def inParallel(t1: Runnable, t2: Runnable, t3: Runnable) {
- val thr1 = new Thread(t1)
- val thr2 = new Thread(t2)
- thr1.start()
- thr2.start()
- t3.run()
- thr1.join()
- thr2.join()
- }
-*/
- private def inParallel(t1: Runnable, t2: Runnable) {
- val thr = new Thread(t2)
- thr.start()
- t1.run()
- thr.join()
- }
-
- def concat(in: InputStream, err: InputStream, out: OutputStream) = new Runnable {
- override def run() {
- val outWriter = new PrintWriter(new OutputStreamWriter(out), true)
- val inApp = new StreamAppender(new BufferedReader(new InputStreamReader(in)),
- outWriter)
- val errStringWriter = new StringWriter
- val errApp = StreamAppender(new BufferedReader(new InputStreamReader(err)),
- errStringWriter)
- inParallel(inApp, errApp)
-
- // append error string to out
- val errStrApp = new StreamAppender(new BufferedReader(new StringReader(errStringWriter.toString)),
- outWriter)
- errStrApp.run()
- }
- }
-}
-
-class StreamAppender(reader: BufferedReader, writer: PrintWriter) extends Runnable {
- override def run() = runAndMap(identity)
-
- def runAndMap(f: String => String) {
- try {
- var line = reader.readLine()
- while (line != null) {
- writer.println(f(line))
- line = reader.readLine()
- }
- } catch {
- case e: IOException =>
- e.printStackTrace()
- }
- }
-}
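
StreamAppender exists mainly so that a child process's stdout and stderr can be drained at the same time: reading them one after the other can deadlock once an OS pipe buffer fills up. The sketch below shows the same idea with plain threads (the example command and output tags are placeholders; not part of the deleted sources).

// Sketch of concurrent stream draining, the pattern behind StreamAppender.concat
// and Worker.runCommand. "java -version" is just a convenient example command.
import java.io.{BufferedReader, InputStream, InputStreamReader}

object DrainSketch {
  def drain(in: InputStream, tag: String) = new Thread(new Runnable {
    def run() {
      val reader = new BufferedReader(new InputStreamReader(in))
      var line = reader.readLine()
      while (line != null) {
        println(tag + line)
        line = reader.readLine()
      }
    }
  })

  def main(args: Array[String]) {
    val proc = Runtime.getRuntime.exec("java -version")
    val out  = drain(proc.getInputStream, "out: ")
    val err  = drain(proc.getErrorStream, "err: ")
    out.start(); err.start()
    out.join();  err.join()
    println("exit code: " + proc.waitFor())
  }
}
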
diff --git a/src/partest/scala/tools/partest/nest/TestFile.scala b/src/partest/scala/tools/partest/nest/TestFile.scala
deleted file mode 100644
index 7ffe11e5b6..0000000000
--- a/src/partest/scala/tools/partest/nest/TestFile.scala
+++ /dev/null
@@ -1,109 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2010 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.{File, BufferedReader, FileReader}
-import scala.tools.nsc.Settings
-
-class TestFile(kind: String, val file: File, val fileManager: FileManager, createOutDir: Boolean) {
- val dir = file.getParentFile
- val dirpath = dir.getAbsolutePath
- val fileBase: String = basename(file.getName)
-
- // @mutates settings
- protected def baseSettings(settings: Settings) {
- settings.classpath.value = settings.classpath.value+
- File.pathSeparator+dirpath
- if (createOutDir)
- settings.outdir.value = {
- val outDir = new File(dir, fileBase + "-" + kind + ".obj")
- if (!outDir.exists)
- outDir.mkdir()
- outDir.toString
- }
-
- // add additional flags found in 'testname.flags'
- val flagsFile = new File(dir, fileBase + ".flags")
- if (flagsFile.exists) {
- val reader = new BufferedReader(new java.io.FileReader(flagsFile))
- val flags = reader.readLine
- if (flags ne null)
- settings.parseParams(settings.splitParams(flags))
- }
- }
-
- def defineSettings(settings: Settings) {
- baseSettings(settings)
- }
-
- private def basename(name: String): String = {
- val inx = name.lastIndexOf(".")
- if (inx < 0) name else name.substring(0, inx)
- }
-
- override def toString(): String = kind+" "+file
-}
-
-case class PosTestFile(override val file: File, override val fileManager: FileManager, createOutDir: Boolean) extends TestFile("pos", file, fileManager, createOutDir) {
- override def defineSettings(settings: Settings) {
- baseSettings(settings)
- settings.classpath.value = settings.classpath.value+
- File.pathSeparator+fileManager.CLASSPATH
- }
-}
-
-case class NegTestFile(override val file: File, override val fileManager: FileManager, createOutDir: Boolean) extends TestFile("neg", file, fileManager, createOutDir) {
- override def defineSettings(settings: Settings) {
- baseSettings(settings)
- settings.classpath.value = settings.classpath.value+
- File.pathSeparator+fileManager.CLASSPATH
- }
-}
-
-case class RunTestFile(override val file: File, override val fileManager: FileManager, createOutDir: Boolean) extends TestFile("run", file, fileManager, createOutDir) {
- override def defineSettings(settings: Settings) {
- baseSettings(settings)
- settings.classpath.value = settings.classpath.value+
- File.pathSeparator+fileManager.CLASSPATH
- }
-}
-
-case class ScalaCheckTestFile(override val file: File, override val fileManager: FileManager, createOutDir: Boolean) extends TestFile("scalacheck", file, fileManager, createOutDir) {
- override def defineSettings(settings: Settings) {
- baseSettings(settings)
- settings.classpath.value = settings.classpath.value+
- File.pathSeparator+fileManager.CLASSPATH
- }
-}
-
-case class JvmTestFile(override val file: File, override val fileManager: FileManager, createOutDir: Boolean) extends TestFile("jvm", file, fileManager, createOutDir) {
- override def defineSettings(settings: Settings) {
- baseSettings(settings)
- settings.classpath.value = settings.classpath.value+
- File.pathSeparator+fileManager.CLASSPATH
- }
-}
-
-case class ShootoutTestFile(override val file: File, override val fileManager: FileManager, createOutDir: Boolean) extends TestFile("shootout", file, fileManager, createOutDir) {
- override def defineSettings(settings: Settings) {
- baseSettings(settings)
- settings.classpath.value = settings.classpath.value+
- File.pathSeparator+fileManager.CLASSPATH
- settings.outdir.value = file.getParent
- }
-}
-
-case class ScalapTestFile(override val file: File, override val fileManager: FileManager, createOutDir: Boolean) extends TestFile("scalap", file, fileManager, createOutDir) {
- override def defineSettings(settings: Settings) {
- baseSettings(settings)
- settings.classpath.value = settings.classpath.value+
- File.pathSeparator+fileManager.CLASSPATH
- settings.outdir.value = file.getParent
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/Worker.scala b/src/partest/scala/tools/partest/nest/Worker.scala
deleted file mode 100644
index 37a98860ad..0000000000
--- a/src/partest/scala/tools/partest/nest/Worker.scala
+++ /dev/null
@@ -1,1000 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2010 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io._
-import java.net.{URLClassLoader, URL}
-import java.util.{Timer, TimerTask}
-
-import scala.tools.nsc.{ObjectRunner, GenericRunnerCommand}
-import scala.tools.nsc.io
-
-import scala.actors.{Actor, Exit, TIMEOUT}
-import scala.actors.Actor._
-import scala.tools.scalap.scalax.rules.scalasig.{ByteCode, ClassFileParser, ScalaSigAttributeParsers}
-
-import scala.collection.mutable.HashMap
-
-case class RunTests(kind: String, files: List[File])
-case class Results(succ: Int, fail: Int, logs: List[LogFile], outdirs: List[File])
-case class LogContext(file: LogFile, writers: Option[(StringWriter, PrintWriter)])
-
-abstract class TestResult {
- def file: File
-}
-case class Result(override val file: File, context: LogContext) extends TestResult
-case class Timeout(override val file: File) extends TestResult
-
-class LogFile(parent: File, child: String) extends File(parent, child) {
- var toDelete = false
-}
-
-class Worker(val fileManager: FileManager) extends Actor {
- import fileManager._
- import scala.tools.nsc.{Settings, CompilerCommand, Global}
- import scala.tools.nsc.reporters.ConsoleReporter
- import scala.tools.nsc.util.FakePos
-
- var reporter: ConsoleReporter = _
- val timer = new Timer
-
- def error(msg: String) {
- reporter.error(FakePos("scalac"),
- msg + "\n scalac -help gives more information")
- }
-
- def act() {
- react {
- case RunTests(kind, files) =>
-        NestUI.verbose("received "+files.length+" files to test")
- val master = sender
- runTests(kind, files, (succ: Int, fail: Int) => {
- master ! Results(succ, fail, createdLogFiles, createdOutputDirs)
- })
- }
- }
-
- private def basename(name: String): String = {
- val inx = name.lastIndexOf(".")
- if (inx < 0) name else name.substring(0, inx)
- }
-
- def printInfoStart(file: File, printer: PrintWriter) {
- NestUI.outline("testing: ", printer)
- val filesdir = file.getAbsoluteFile.getParentFile.getParentFile
- val testdir = filesdir.getParentFile
- val totalWidth = 56
- val name = {
- // 1. try with [...]/files/run/test.scala
- val testPathLen = testdir.getAbsolutePath.length
- val name = file.getAbsolutePath.substring(testPathLen)
- if (name.length <= totalWidth)
- name
- // 2. try with [...]/run/test.scala
- else {
- val filesPathLen = filesdir.getAbsolutePath.length
- file.getAbsolutePath.substring(filesPathLen)
- }
- }
- NestUI.normal("[...]"+name+(List.fill(totalWidth-name.length)(' ')).mkString, printer)
- }
-
- def printInfoEnd(success: Boolean, printer: PrintWriter) {
- NestUI.normal("[", printer)
- if (success) NestUI.success(" OK ", printer)
- else NestUI.failure("FAILED", printer)
- NestUI.normal("]\n", printer)
- }
-
- def printInfoTimeout(printer: PrintWriter) {
- NestUI.normal("[", printer)
-    NestUI.failure("TIMEOUT", printer)
- NestUI.normal("]\n", printer)
- }
-
- var log = ""
- var createdLogFiles: List[LogFile] = List()
- var createdOutputDirs: List[File] = List()
-
- def createLogFile(file: File, kind: String): LogFile = {
- val logFile = fileManager.getLogFile(file, kind)
- createdLogFiles = logFile :: createdLogFiles
- logFile
- }
-
- def createOutputDir(dir: File, fileBase: String, kind: String): File = {
- val outDir = io.Path(dir) / io.Directory("%s-%s.obj".format(fileBase, kind))
- outDir.createDirectory()
- createdOutputDirs ::= outDir.jfile
- outDir.jfile
- }
-
- /* Note: not yet used/tested. */
- def execTestObjectRunner(file: File, outDir: File, logFile: File) {
- val consFM = new ConsoleFileManager
- import consFM.{latestCompFile, latestLibFile, latestActFile,
- latestPartestFile}
-
- val classpath: List[URL] =
- outDir.toURI.toURL ::
- //List(file.getParentFile.toURI.toURL) :::
- List(latestCompFile.toURI.toURL, latestLibFile.toURI.toURL, latestActFile.toURI.toURL, latestPartestFile.toURI.toURL) :::
- ((CLASSPATH split File.pathSeparatorChar).toList map (x => new File(x).toURI.toURL))
-
- NestUI.verbose("ObjectRunner classpath: "+classpath)
-
- try {
- // configure input/output files
- val logOut = new FileOutputStream(logFile)
- val logWriter = new PrintStream(logOut)
-
- // grab global lock
- fileManager.synchronized {
-
- val oldStdOut = System.out
- val oldStdErr = System.err
- System.setOut(logWriter)
- System.setErr(logWriter)
-
- /*
- " -Djava.library.path="+logFile.getParentFile.getAbsolutePath+
- " -Dscalatest.output="+outDir.getAbsolutePath+
- " -Dscalatest.lib="+LATEST_LIB+
- " -Dscalatest.cwd="+outDir.getParent+
- " -Djavacmd="+JAVACMD+
- */
-
- System.setProperty("java.library.path", logFile.getParentFile.getCanonicalFile.getAbsolutePath)
- System.setProperty("scalatest.output", outDir.getCanonicalFile.getAbsolutePath)
- System.setProperty("scalatest.lib", LATEST_LIB)
- System.setProperty("scalatest.cwd", outDir.getParent)
-
- ObjectRunner.run(classpath, "Test", List("jvm"))
-
- logWriter.flush()
- logWriter.close()
-
- System.setOut(oldStdOut)
- System.setErr(oldStdErr)
- }
-
- /*val out = new FileOutputStream(logFile, true)
- Console.withOut(new PrintStream(out)) {
- ObjectRunner.run(classpath, "Test", List("jvm"))
- }
- out.flush
- out.close*/
- } catch {
- case e: Exception =>
- NestUI.verbose(e+" ("+file.getPath+")")
- e.printStackTrace()
- }
- }
-
- def javac(outDir: File, files: List[File], output: File): Boolean = {
- // compile using command-line javac compiler
- val javacCmd = if ((fileManager.JAVAC_CMD.indexOf("${env.JAVA_HOME}") != -1) ||
- fileManager.JAVAC_CMD.equals("/bin/javac") ||
- fileManager.JAVAC_CMD.equals("\\bin\\javac"))
- "javac"
- else
- fileManager.JAVAC_CMD
-
- val cmd = javacCmd+
- " -d "+outDir.getAbsolutePath+
- " -classpath "+outDir+File.pathSeparator+CLASSPATH+
- " "+files.mkString(" ")
-
- val (success, msg) = try {
- val exitCode = runCommand(cmd, output)
- NestUI.verbose("javac returned exit code: "+exitCode)
- if (exitCode != 0)
- (false, "Running \"javac\" failed with exit code: "+exitCode+"\n"+cmd+"\n")
- else
- (true, "")
- } catch {
- case e: Exception =>
- val swriter = new StringWriter
- e.printStackTrace(new PrintWriter(swriter))
- (false, "Running \"javac\" failed:\n"+cmd+"\n"+swriter.toString+"\n")
- }
- if (!success) {
- val writer = new PrintWriter(new FileWriter(output, true), true)
- writer.print(msg)
- writer.close()
- }
- success
- }
-
- /** Runs <code>command</code> redirecting standard out and
- * error out to <code>output</code> file.
- */
- def runCommand(command: String, output: File): Int = {
- NestUI.verbose("running command:\n"+command)
- val proc = Runtime.getRuntime.exec(command)
- val in = proc.getInputStream
- val err = proc.getErrorStream
- val writer = new PrintWriter(new FileWriter(output), true)
- val inApp = new StreamAppender(new BufferedReader(new InputStreamReader(in)),
- writer)
- val errApp = new StreamAppender(new BufferedReader(new InputStreamReader(err)),
- writer)
- val async = new Thread(errApp)
- async.start()
- inApp.run()
- async.join()
- writer.close()
- try {
- proc.exitValue()
- } catch {
- case e: IllegalThreadStateException => 0
- }
- }
-
- def execTest(outDir: File, logFile: File, fileBase: String) {
- // check whether there is a ".javaopts" file
- val argsFile = new File(logFile.getParentFile, fileBase+".javaopts")
- val argString = if (argsFile.exists) {
- NestUI.verbose("Found javaopts file: "+argsFile)
- val fileReader = new FileReader(argsFile)
- val reader = new BufferedReader(fileReader)
- val options = reader.readLine()
- reader.close()
- NestUI.verbose("Found javaopts file '%s', using options: '%s'".format(argsFile, options))
- options
- } else ""
-
- val cp = System.getProperty("java.class.path", ".")
- NestUI.verbose("java.class.path: "+cp)
-
- def quote(path: String) = "\""+path+"\""
-
- // Note! As this currently functions, JAVA_OPTS must precede argString
- // because when an option is repeated to java only the last one wins.
- // That means until now all the .javaopts files were being ignored because
- // they all attempt to change options which are also defined in
- // scalatest.java_opts, leading to debug output like:
- //
- // debug: Found javaopts file 'files/shootout/message.scala-2.javaopts', using options: '-Xss32k'
- // debug: java -Xss32k -Xss2m -Xms256M -Xmx1024M -classpath [...]
- val cmd =
- JAVACMD+
- " "+JAVA_OPTS+
- " "+argString+
- " -classpath "+outDir+File.pathSeparator+CLASSPATH+
- " -Djava.library.path="+logFile.getParentFile.getAbsolutePath+
- " -Dscalatest.output="+outDir.getAbsolutePath+
- " -Dscalatest.lib="+LATEST_LIB+
- " -Dscalatest.cwd="+outDir.getParent+
- " -Djavacmd="+JAVACMD+
- " -Duser.language=en -Duser.country=US"+
- " scala.tools.nsc.MainGenericRunner"+
- " Test jvm"
- NestUI.verbose(cmd)
-
- runCommand(cmd, logFile)
-
- if (fileManager.showLog) {
- // produce log as string in `log`
- val reader = new BufferedReader(new FileReader(logFile))
- val swriter = new StringWriter
- val pwriter = new PrintWriter(swriter, true)
- val appender = new StreamAppender(reader, pwriter)
- appender.run()
- log = swriter.toString
- }
- }
-
- def existsCheckFile(dir: File, fileBase: String, kind: String) = {
- val checkFile = {
- val chkFile = new File(dir, fileBase + ".check")
- if (chkFile.isFile)
- chkFile
- else
- new File(dir, fileBase + "-" + kind + ".check")
- }
- checkFile.exists && checkFile.canRead
- }
-
- def compareOutput(dir: File, fileBase: String, kind: String, logFile: File): String = {
- def getCheckFile(s: String) = {
- val f = io.Path(dir) / io.File("%s%s.check".format(fileBase, s))
- if (f.isFile && f.canRead) Some(f) else None
- }
-
- // if check file exists, compare with log file
- (getCheckFile("") orElse getCheckFile("-" + kind)) match {
- case Some(f) => fileManager.compareFiles(logFile, f.jfile)
- case _ => file2String(logFile)
- }
- }
-
- def file2String(logFile: File) = io.File(logFile).slurp()
-
- /** Runs a list of tests.
- *
- * @param kind The test kind (pos, neg, run, etc.)
- * @param files The list of test files
- */
- def runTests(kind: String, files: List[File], topcont: (Int, Int) => Unit) {
- val compileMgr = new CompileManager(fileManager)
- var errors = 0
- var succeeded = true
- var diff = ""
- var log = ""
-
- /** 1. Creates log file and output directory.
- * 2. Runs <code>script</code> function, providing log file and
- * output directory as arguments.
- */
- def runInContext(file: File, kind: String, script: (File, File) => Unit): LogContext = {
- // when option "--failed" is provided
- // execute test only if log file is present
- // (which means it failed before)
- val logFile = createLogFile(file, kind)
- if (!fileManager.failed || (logFile.exists && logFile.canRead)) {
- val swr = new StringWriter
- val wr = new PrintWriter(swr)
- succeeded = true
- diff = ""
- log = ""
- printInfoStart(file, wr)
-
- val fileBase: String = basename(file.getName)
- NestUI.verbose(this+" running test "+fileBase)
- val dir = file.getParentFile
- val outDir = createOutputDir(dir, fileBase, kind)
- NestUI.verbose("output directory: "+outDir)
-
- // run test-specific code
- try {
- script(logFile, outDir)
- } catch {
- case e: Exception =>
- val writer = new PrintWriter(new FileWriter(logFile), true)
- e.printStackTrace(writer)
- writer.close()
- succeeded = false
- }
-
- LogContext(logFile, Some((swr, wr)))
- } else
- LogContext(logFile, None)
- }
-
- def compileFilesIn(dir: File, kind: String, logFile: File, outDir: File) {
- val testFiles = dir.listFiles.toList
-
- val groups = for (i <- 0 to 9) yield testFiles filter { f =>
- f.getName.endsWith("_"+i+".java") ||
- f.getName.endsWith("_"+i+".scala") }
-
- val noSuffix = testFiles filter { f =>
- !groups.exists(_ contains f) && (
- f.getName.endsWith(".java") ||
- f.getName.endsWith(".scala")) }
-
- def compileGroup(g: List[File]) {
- val scalaFiles = g.filter(_.getName.endsWith(".scala"))
- val javaFiles = g.filter(_.getName.endsWith(".java"))
-
- if (!scalaFiles.isEmpty &&
- !compileMgr.shouldCompile(outDir,
- javaFiles ::: scalaFiles,
- kind, logFile)) {
- NestUI.verbose("scalac: compilation of "+g+" failed\n")
- succeeded = false
- }
-
- if (succeeded && !javaFiles.isEmpty) {
- succeeded = javac(outDir, javaFiles, logFile)
- if (succeeded && !scalaFiles.isEmpty
- && !compileMgr.shouldCompile(outDir,
- scalaFiles,
- kind, logFile)) {
- NestUI.verbose("scalac: compilation of "+scalaFiles+" failed\n")
- succeeded = false
- }
- }
- }
-
- if (!noSuffix.isEmpty)
- compileGroup(noSuffix)
- for (grp <- groups) {
- if (succeeded)
- compileGroup(grp)
- }
- }
-
- def failCompileFilesIn(dir: File, kind: String, logFile: File, outDir: File) {
- val testFiles = dir.listFiles.toList
- val javaFiles = testFiles.filter(_.getName.endsWith(".java"))
- val scalaFiles = testFiles.filter(_.getName.endsWith(".scala"))
- if (!(scalaFiles.isEmpty && javaFiles.isEmpty) &&
- !compileMgr.shouldFailCompile(outDir, javaFiles ::: scalaFiles, kind, logFile)) {
- NestUI.verbose("compilation of "+scalaFiles+" failed\n")
- succeeded = false
- }
- }
-
- def runJvmTest(file: File, kind: String): LogContext =
- runInContext(file, kind, (logFile: File, outDir: File) => {
- if (file.isDirectory) {
- compileFilesIn(file, kind, logFile, outDir)
- } else if (!compileMgr.shouldCompile(List(file), kind, logFile)) {
- NestUI.verbose("compilation of "+file+" failed\n")
- succeeded = false
- }
- if (succeeded) { // run test
- val fileBase = basename(file.getName)
- val dir = file.getParentFile
-
- //TODO: detect whether we have to use Runtime.exec
- val useRuntime = true
-
- if (useRuntime)
- execTest(outDir, logFile, fileBase)
- else
- execTestObjectRunner(file, outDir, logFile)
- NestUI.verbose(this+" finished running "+fileBase)
-
- diff = compareOutput(dir, fileBase, kind, logFile)
- if (!diff.equals("")) {
- NestUI.verbose("output differs from log file\n")
- succeeded = false
- }
- }
- })
-
- def processSingleFile(file: File): LogContext = kind match {
- case "scalacheck" =>
- runInContext(file, kind, (logFile: File, outDir: File) => {
- if (file.isDirectory) {
- compileFilesIn(file, kind, logFile, outDir)
- } else if (!compileMgr.shouldCompile(List(file), kind, logFile)) {
- NestUI.verbose("compilation of "+file+" failed\n")
- succeeded = false
- }
- if (succeeded) {
- val consFM = new ConsoleFileManager
- import consFM.{latestCompFile, latestLibFile, latestActFile,
- latestPartestFile}
-
- NestUI.verbose("compilation of "+file+" succeeded\n")
-
- val libs = new File(fileManager.LIB_DIR)
- val scalacheckURL = (new File(libs, "ScalaCheck.jar")).toURI.toURL
- val outURL = outDir.getCanonicalFile.toURI.toURL
- val classpath: List[URL] =
- List(outURL, scalacheckURL, latestCompFile.toURI.toURL, latestLibFile.toURI.toURL,
- latestActFile.toURI.toURL, latestPartestFile.toURI.toURL).removeDuplicates
-
- // XXX this is a big cut-and-paste mess, but the revamp is coming
- val logOut = new FileOutputStream(logFile)
- val logWriter = new PrintStream(logOut)
- val oldStdOut = System.out
- val oldStdErr = System.err
- System.setOut(logWriter)
- System.setErr(logWriter)
-
- ObjectRunner.run(classpath, "Test", Nil)
-
- logWriter.flush()
- logWriter.close()
- System.setOut(oldStdOut)
- System.setErr(oldStdErr)
-
- NestUI.verbose(io.File(logFile).slurp())
- // obviously this must be improved upon
- succeeded = io.File(logFile).lines() forall (_ contains " OK")
- }
- })
-
- case "pos" =>
- runInContext(file, kind, (logFile: File, outDir: File) => {
- if (file.isDirectory) {
- compileFilesIn(file, kind, logFile, outDir)
- } else if (!compileMgr.shouldCompile(List(file), kind, logFile)) {
- NestUI.verbose("compilation of "+file+" failed\n")
- succeeded = false
- }
- })
-
- case "neg" =>
- runInContext(file, kind, (logFile: File, outDir: File) => {
- if (file.isDirectory) {
- failCompileFilesIn(file, kind, logFile, outDir)
- } else if (!compileMgr.shouldFailCompile(List(file), kind, logFile)) {
- succeeded = false
- }
- if (succeeded) { // compare log file to check file
- val fileBase = basename(file.getName)
- val dir = file.getParentFile
- if (!existsCheckFile(dir, fileBase, kind)) {
- // diff is contents of logFile
- diff = file2String(logFile)
- } else
- diff = compareOutput(dir, fileBase, kind, logFile)
-
- if (!diff.equals("")) {
- NestUI.verbose("output differs from log file\n")
- succeeded = false
- }
- }
- })
-
- case "run" =>
- runJvmTest(file, kind)
-
- case "jvm" =>
- runJvmTest(file, kind)
-
- case "res" => {
- // when option "--failed" is provided
- // execute test only if log file is present
- // (which means it failed before)
-
- //val (logFileOut, logFileErr) = createLogFiles(file, kind)
- val logFile = createLogFile(file, kind)
- if (!fileManager.failed || (logFile.exists && logFile.canRead)) {
- val swr = new StringWriter
- val wr = new PrintWriter(swr)
- succeeded = true; diff = ""; log = ""
- printInfoStart(file, wr)
-
- val fileBase: String = basename(file.getName)
- NestUI.verbose(this+" running test "+fileBase)
- val dir = file.getParentFile
- val outDir = createOutputDir(dir, fileBase, kind)
- if (!outDir.exists) outDir.mkdir()
- val resFile = new File(dir, fileBase + ".res")
- NestUI.verbose("outDir: "+outDir)
- NestUI.verbose("logFile: "+logFile)
- //NestUI.verbose("logFileErr: "+logFileErr)
- NestUI.verbose("resFile: "+resFile)
-
- // run compiler in resident mode
- // $SCALAC -d "$os_dstbase".obj -Xresident -sourcepath . "$@"
-
- try {
-
- val sourcedir = logFile.getParentFile.getCanonicalFile
- val sourcepath = sourcedir.getAbsolutePath+File.separator
- NestUI.verbose("sourcepath: "+sourcepath)
-
- val argString =
- "-d "+outDir.getCanonicalFile.getAbsolutePath+
- " -Xresident"+
- " -sourcepath "+sourcepath
- val argList = argString split ' ' toList
-
- // configure input/output files
- val logOut = new FileOutputStream(logFile)
- val logWriter = new PrintStream(logOut)
- val resReader = new BufferedReader(new FileReader(resFile))
- val logConsoleWriter = new PrintWriter(new OutputStreamWriter(logOut))
-
- // create compiler
- val settings = new Settings(error)
- settings.sourcepath.value = sourcepath
- settings.classpath.value = fileManager.CLASSPATH
- reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter)
- val command = new CompilerCommand(argList, settings, error, false)
- object compiler extends Global(command.settings, reporter)
-
- // simulate resident compiler loop
- val prompt = "\nnsc> "
-
- val resCompile = (line: String) => {
- NestUI.verbose("compiling "+line)
- val cmdArgs = (line split ' ').toList map (fs => new File(dir, fs).getAbsolutePath)
- NestUI.verbose("cmdArgs: "+cmdArgs)
- val sett = new Settings(error)
- sett.sourcepath.value = sourcepath
- val command = new CompilerCommand(cmdArgs, sett, error, true)
- (new compiler.Run) compile command.files
- }
-
- def loop(action: (String) => Unit) {
- logWriter.print(prompt)
- val line = resReader.readLine()
- if ((line ne null) && line.length() > 0) {
-/*
- val parent = self
- self.trapExit = true
- val child = link {
- action(line)
- }
-
- receiveWithin(fileManager.timeout.toLong) {
- case TIMEOUT =>
- NestUI.verbose("action timed out")
- false
- case Exit(from, reason) if from == child => reason match {
- case 'normal => // do nothing
- case t: Throwable =>
- NestUI.verbose("while invoking compiler:")
- NestUI.verbose("caught "+t)
- t.printStackTrace
- if (t.getCause != null)
- t.getCause.printStackTrace
- false
- }
- }
-*/
- action(line)
- loop(action)
- }
- }
- val oldStdOut = System.out
- val oldStdErr = System.err
- System.setOut(logWriter)
- System.setErr(logWriter)
- loop(resCompile)
- resReader.close()
- logWriter.flush()
- logWriter.close()
-
- System.setOut(oldStdOut)
- System.setErr(oldStdErr)
-
- val tempLogFile = new File(dir, fileBase+".temp.log")
- val logFileReader = new BufferedReader(new FileReader(logFile))
- val tempLogFilePrinter = new PrintWriter(new FileWriter(tempLogFile))
- val appender =
- new StreamAppender(logFileReader, tempLogFilePrinter)
-
- // function that removes a given string from another string
- def removeFrom(line: String, path: String): String = {
- // find `path` in `line`
- val index = line.indexOf(path)
- if (index != -1) {
- line.substring(0, index) + line.substring(index + path.length, line.length)
- } else line
- }
-
- appender.runAndMap({ s =>
- val woPath = removeFrom(s, dir.getAbsolutePath/*.replace(File.separatorChar,'/')*/+File.separator)
- // now replace single '\' with '/'
- woPath.replace('\\', '/')
- })
- logFileReader.close()
- tempLogFilePrinter.close()
-
- val tempLogFileReader = new BufferedReader(new FileReader(tempLogFile))
- val logFilePrinter= new PrintWriter(new FileWriter(logFile), true)
- (new StreamAppender(tempLogFileReader, logFilePrinter)).run
- tempLogFileReader.close()
- logFilePrinter.close()
-
- tempLogFile.delete()
-
- diff = compareOutput(dir, fileBase, kind, logFile)
- if (!diff.equals("")) {
- NestUI.verbose("output differs from log file\n")
- succeeded = false
- }
-
- } catch {
- case e: Exception =>
- e.printStackTrace()
- succeeded = false
- }
-
- LogContext(logFile, Some((swr, wr)))
- } else
- LogContext(logFile, None)
- }
-
- case "shootout" => {
- // when option "--failed" is provided
- // execute test only if log file is present
- // (which means it failed before)
- val logFile = createLogFile(file, kind)
- if (!fileManager.failed || (logFile.exists && logFile.canRead)) {
- val swr = new StringWriter
- val wr = new PrintWriter(swr)
- succeeded = true; diff = ""; log = ""
- printInfoStart(file, wr)
-
- val fileBase: String = basename(file.getName)
- NestUI.verbose(this+" running test "+fileBase)
- val dir = file.getParentFile
- val outDir = createOutputDir(dir, fileBase, kind)
- if (!outDir.exists) outDir.mkdir()
-
- // 2. define file {outDir}/test.scala that contains code to compile/run
- val testFile = new File(outDir, "test.scala")
- NestUI.verbose("outDir: "+outDir)
- NestUI.verbose("logFile: "+logFile)
- NestUI.verbose("testFile: "+testFile)
-
- // 3. cat {test}.scala.runner {test}.scala > testFile
- val runnerFile = new File(dir, fileBase+".scala.runner")
- val bodyFile = new File(dir, fileBase+".scala")
- val appender = StreamAppender.concat(new FileInputStream(runnerFile),
- new FileInputStream(bodyFile),
- new FileOutputStream(testFile))
- appender.run()
-
- try { // *catch-all*
- // 4. compile testFile
- if (!compileMgr.shouldCompile(List(testFile), kind, logFile)) {
- NestUI.verbose("compilation of "+file+" failed\n")
- succeeded = false
- } else {
- NestUI.verbose("compilation of "+testFile+"succeeded")
- // -------- run test --------
-
- //TODO: detect whether we have to use Runtime.exec
- val useRuntime = true
-
- if (useRuntime)
- execTest(outDir, logFile, fileBase)
- else
- execTestObjectRunner(file, outDir, logFile)
- NestUI.verbose(this+" finished running "+fileBase)
- } // successful compile
- } catch { // *catch-all*
- case e: Exception =>
- NestUI.verbose("caught "+e)
- succeeded = false
- }
-
- diff = compareOutput(dir, fileBase, kind, logFile)
- if (!diff.equals("")) {
- NestUI.verbose("output differs from log file\n")
- succeeded = false
- }
-
- LogContext(logFile, Some((swr, wr)))
- } else
- LogContext(logFile, None)
- }
-
- case "scalap" => {
-
- def decompileFile(clazz: Class[_], packObj: Boolean) = {
- val byteCode = ByteCode.forClass(clazz)
- val classFile = ClassFileParser.parse(byteCode)
- val Some(sig) = classFile.attribute("ScalaSig").map(_.byteCode).map(ScalaSigAttributeParsers.parse)
- import scala.tools.scalap.Main._
- parseScalaSignature(sig, packObj)
- }
-
- runInContext(file, kind, (logFile: File, outDir: File) => {
- val sourceDir = file.getParentFile
- val sourceDirName = sourceDir.getName
-
- // 1. Find file with result text
- val results = sourceDir.listFiles(new FilenameFilter {
- def accept(dir: File, name: String) = name == "result.test"
- })
-
- if (results.length != 1) {
- NestUI.verbose("Result file not found in directory " + sourceDirName + " \n")
- } else {
- val resFile = results(0)
- // 2. Compile source file
- if (!compileMgr.shouldCompile(outDir, List(file), kind, logFile)) {
- NestUI.verbose("compilerMgr failed to compile %s to %s".format(file, outDir))
- succeeded = false
- } else {
-
- // 3. Decompile file and compare results
- val isPackageObject = sourceDir.getName.startsWith("package")
- val className = sourceDirName.capitalize + (if (!isPackageObject) "" else ".package")
- val url = outDir.toURI.toURL
- val loader = new URLClassLoader(Array(url), getClass.getClassLoader)
- val clazz = loader.loadClass(className)
-
- val result = decompileFile(clazz, isPackageObject)
-
- try {
- val fstream = new FileWriter(logFile);
- val out = new BufferedWriter(fstream);
- out.write(result)
- out.close();
- } catch {
- case e: IOException => NestUI.verbose(e.getMessage()); succeeded = false
- }
-
- val diff = fileManager.compareFiles(logFile, resFile)
- if (!diff.equals("")) {
- NestUI.verbose("output differs from log file\n")
- succeeded = false
- }
- }
- }
- })
- }
-
- case "script" => {
- val osName = System.getProperty("os.name", "")
- // when option "--failed" is provided
- // execute test only if log file is present
- // (which means it failed before)
- val logFile = createLogFile(file, kind)
- if (!fileManager.failed || (logFile.exists && logFile.canRead)) {
- val swr = new StringWriter
- val wr = new PrintWriter(swr)
- succeeded = true; diff = ""; log = ""
- printInfoStart(file, wr)
-
- val fileBase: String = basename(file.getName)
- NestUI.verbose(this+" running test "+fileBase)
-
- // check whether there is an args file
- val argsFile = new File(file.getParentFile, fileBase+".args")
- NestUI.verbose("argsFile: "+argsFile)
- val argString = if (argsFile.exists) {
- val swriter = new StringWriter
- val app = StreamAppender(new BufferedReader(new FileReader(argsFile)),
- swriter)
- app.run()
- " "+swriter.toString
- } else ""
-
- try {
- val cmdString =
- if (osName startsWith "Windows") {
- val batchFile = new File(file.getParentFile, fileBase+".bat")
- NestUI.verbose("batchFile: "+batchFile)
- batchFile.getAbsolutePath
- }
- else file.getAbsolutePath
- val proc = Runtime.getRuntime.exec(cmdString+argString)
- val in = proc.getInputStream
- val err = proc.getErrorStream
- val writer = new PrintWriter(new FileWriter(logFile), true)
- val inApp = new StreamAppender(new BufferedReader(new InputStreamReader(in)),
- writer)
- val errApp = new StreamAppender(new BufferedReader(new InputStreamReader(err)),
- writer)
- val async = new Thread(errApp)
- async.start()
- inApp.run()
- async.join()
-
- writer.close()
-
- diff = compareOutput(file.getParentFile, fileBase, kind, logFile)
- if (!diff.equals("")) {
- NestUI.verbose("output differs from log file\n")
- succeeded = false
- }
- } catch { // *catch-all*
- case e: Exception =>
- NestUI.verbose("caught "+e)
- succeeded = false
- }
-
- LogContext(logFile, Some((swr, wr)))
- } else
- LogContext(logFile, None)
- }
- }
-
- def reportAll(cont: (Int, Int) => Unit) {
- NestUI.verbose("finished testing "+kind+" with "+errors+" errors")
- NestUI.verbose("created "+compileMgr.numSeparateCompilers+" separate compilers")
- timer.cancel()
- cont(files.length-errors, errors)
- }
-
- def reportResult(logs: Option[LogContext]) {
- if (!succeeded) {
- errors += 1
- NestUI.verbose("incremented errors: "+errors)
- }
-
- try {
- // delete log file only if test was successful
- if (succeeded && !logs.isEmpty)
- logs.get.file.toDelete = true
-
- if (!logs.isEmpty)
- logs.get.writers match {
- case Some((swr, wr)) =>
- printInfoEnd(succeeded, wr)
- wr.flush()
- swr.flush()
- NestUI.normal(swr.toString)
- if (!succeeded && fileManager.showDiff && diff != "")
- NestUI.normal(diff)
- if (!succeeded && fileManager.showLog)
- showLog(logs.get.file)
- case None =>
- }
- } catch {
- case npe: NullPointerException =>
- }
- }
-
- val numFiles = files.size
- if (numFiles == 0)
- reportAll(topcont)
-
- // maps canonical file names to the test result (0: OK, 1: FAILED, 2: TIMOUT)
- val status = new HashMap[String, Int]
-
- var fileCnt = 1
- Actor.loopWhile(fileCnt <= numFiles) {
- val parent = self
-
- actor {
- val testFile = files(fileCnt-1)
-
- val ontimeout = new TimerTask {
- def run() = parent ! Timeout(testFile)
- }
- timer.schedule(ontimeout, fileManager.timeout.toLong)
-
- val context = try {
- processSingleFile(testFile)
- } catch {
- case t: Throwable =>
- NestUI.verbose("while invoking compiler ("+files+"):")
- NestUI.verbose("caught "+t)
- t.printStackTrace
- if (t.getCause != null)
- t.getCause.printStackTrace
- LogContext(null, None)
- }
- parent ! Result(testFile, context)
- }
-
- react {
- case res: TestResult =>
- val path = res.file.getCanonicalPath
- status.get(path) match {
- case Some(stat) => // ignore message
- case None => res match {
- case Timeout(_) =>
- status += (path -> 2)
- val swr = new StringWriter
- val wr = new PrintWriter(swr)
- printInfoStart(files(fileCnt-1), wr)
- printInfoTimeout(wr)
- wr.flush()
- swr.flush()
- NestUI.normal(swr.toString)
- succeeded = false
- reportResult(None)
- if (fileCnt == numFiles)
- reportAll(topcont)
- fileCnt += 1
- case Result(_, logs) =>
- status += (path -> (if (succeeded) 0 else 1))
- reportResult(if (logs != null) Some(logs) else None)
- if (fileCnt == numFiles)
- reportAll(topcont)
- fileCnt += 1
- }
- }
- }
- }
- }
-
- def showLog(logFile: File) {
- try {
- val logReader = new BufferedReader(new FileReader(logFile))
- val strWriter = new StringWriter
- val logWriter = new PrintWriter(strWriter, true)
- val logAppender = new StreamAppender(logReader, logWriter)
- logAppender.run()
- logReader.close()
- val log = strWriter.toString
- NestUI.normal(log)
- } catch {
- case fnfe: java.io.FileNotFoundException =>
- NestUI.failure("Couldn't open log file \""+logFile+"\".")
- }
- }
-}
diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala
new file mode 100644
index 0000000000..3ef4db7cd8
--- /dev/null
+++ b/src/partest/scala/tools/partest/package.scala
@@ -0,0 +1,47 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+
+import nsc.io.{ File, Path, Process, Directory }
+import nsc.util.CommandLineSpec
+import java.nio.charset.CharacterCodingException
+
+package object partest {
+ /** CharacterCodingExceptions are thrown, at least on Windows, when trying
+ * to read a file like script/utf-8.scala
+ */
+ private[partest] def safeSlurp(f: File) =
+ try if (f.exists) f.slurp() else ""
+ catch { case _: CharacterCodingException => "" }
+
+ private[partest] def safeLines(f: File) = safeSlurp(f) split """\r\n|\r|\n""" toList
+ private[partest] def safeArgs(f: File) = toArgs(safeSlurp(f))
+ private[partest] def safeToInt(s: String) = try Some(s.toInt) catch { case _: NumberFormatException => None }
+ private[partest] def isJava(f: Path) = f.isFile && (f hasExtension "java")
+ private[partest] def isScala(f: Path) = f.isFile && (f hasExtension "scala")
+ private[partest] def isJavaOrScala(f: Path) = isJava(f) || isScala(f)
+
+ private[partest] def toArgs(line: String) = CommandLineSpec toArgs line
+ private[partest] def fromArgs(args: List[String]) = CommandLineSpec fromArgs args
+
+ /** Strings, argument lists, etc. */
+
+ private[partest] def fromAnyArgs(args: List[Any]) = args mkString " " // separate to avoid accidents
+ private[partest] def toStringTrunc(x: Any, max: Int = 240) = {
+ val s = x.toString
+ if (s.length < max) s
+ else (s take max) + " [...]"
+ }
+ private[partest] def setProp(k: String, v: String) = scala.util.Properties.setProp(k, v)
+
+ /** Pretty self-explanatory. */
+ def printAndExit(msg: String): Unit = {
+ println(msg)
+ exit(1)
+ }
+
+ /** Apply a function and return the passed value */
+ def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
+}
\ No newline at end of file
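As a point of reference, the "catch the expected failure, return a neutral value" pattern behind safeSlurp and safeToInt can be sketched standalone. The version below is illustrative only; it substitutes scala.io.Source and java.io.File for the nsc.io.File used above.

    import java.io.{ File => JFile }
    import java.nio.charset.CharacterCodingException
    import scala.io.Source

    object SafeIOSketch {
      // Swallow the decoding failure and fall back to "", mirroring safeSlurp.
      def safeSlurp(f: JFile): String =
        try if (f.exists) Source.fromFile(f, "UTF-8").mkString else ""
        catch { case _: CharacterCodingException => "" }

      // Mirror safeToInt: a malformed number becomes None instead of an exception.
      def safeToInt(s: String): Option[Int] =
        try Some(s.toInt) catch { case _: NumberFormatException => None }
    }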
diff --git a/src/partest/scala/tools/partest/util/package.scala b/src/partest/scala/tools/partest/util/package.scala
new file mode 100644
index 0000000000..bc5470ba5d
--- /dev/null
+++ b/src/partest/scala/tools/partest/util/package.scala
@@ -0,0 +1,61 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2010 LAMP/EPFL
+ */
+
+package scala.tools
+package partest
+
+import java.util.{ Timer, TimerTask }
+import java.io.StringWriter
+import nsc.io._
+
+/** Misc code still looking for a good home.
+ */
+package object util {
+
+ def allPropertiesString() = javaHashtableToString(System.getProperties)
+
+ private def javaHashtableToString(table: java.util.Hashtable[_,_]) = {
+ import collection.JavaConversions._
+ (table.toList map { case (k, v) => "%s -> %s\n".format(k, v) }).sorted mkString
+ }
+
+ def filesToSet(pre: String, fs: List[String]): Set[AbstractFile] =
+ fs flatMap (x => Option(AbstractFile getFile (Path(pre) / x).path)) toSet
+
+ /** Copies one Path to another Path, trying to be sensible when one or the
+ * other is a Directory. Returns true if it believes it succeeded.
+ */
+ def copyPath(from: Path, to: Path): Boolean = {
+ if (!to.parent.isDirectory)
+ to.parent.createDirectory(force = true)
+
+ def copyDir = {
+ val sub = to / from.name createDirectory true
+ from.toDirectory.list forall (x => copyPath(x, sub))
+ }
+ (from.isDirectory, to.isDirectory) match {
+ case (true, true) => copyDir
+ case (true, false) => false
+ case (false, true) => from.toFile copyTo (to / from.name)
+ case (false, false) => from.toFile copyTo to
+ }
+ }
+
+ /**
+ * Compares two files using a Java implementation of the GNU diff
+ * available at http://www.bmsi.com/java/#diff.
+ *
+ * @param f1 the first file to be compared
+ * @param f2 the second file to be compared
+ * @return the text difference between the compared files
+ */
+ def diffFiles(f1: File, f2: File): String = {
+ val diffWriter = new StringWriter
+ val args = Array(f1.toAbsolute.path, f2.toAbsolute.path)
+
+ io.DiffPrint.doDiff(args, diffWriter)
+ val result = diffWriter.toString
+ if (result == "No differences") "" else result
+ }
+}
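For orientation, the four-way dispatch that copyPath documents (directory into directory recurses, file into directory drops the file inside, directory onto a plain file is refused) can be restated with plain java.io. This is an illustrative sketch, not the partest implementation.

    import java.io.{ File => JFile, FileInputStream, FileOutputStream }

    object CopyPathSketch {
      // Same dispatch as copyPath, expressed over java.io.File.
      def copy(from: JFile, to: JFile): Boolean = (from.isDirectory, to.isDirectory) match {
        case (true, true) =>
          val sub = new JFile(to, from.getName)      // recreate `from` under `to`, then recurse
          sub.mkdirs()
          Option(from.listFiles).getOrElse(Array.empty[JFile]).forall(f => copy(f, sub))
        case (true, false)  => false                 // directory onto a plain file: refuse
        case (false, true)  => copyFile(from, new JFile(to, from.getName))
        case (false, false) => copyFile(from, to)
      }

      private def copyFile(src: JFile, dst: JFile): Boolean = {
        val in = new FileInputStream(src)
        val out = new FileOutputStream(dst)
        try { out.getChannel.transferFrom(in.getChannel, 0, Long.MaxValue); true }
        finally { in.close(); out.close() }
      }
    }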
diff --git a/src/partest/scala/tools/partest/utils/PrintMgr.scala b/src/partest/scala/tools/partest/utils/PrintMgr.scala
deleted file mode 100644
index 10533130f1..0000000000
--- a/src/partest/scala/tools/partest/utils/PrintMgr.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2010, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.tools.partest
-package utils
-
-/**
- * @author Thomas Hofer
- */
-object PrintMgr {
-
- val NONE = 0
- val SOME = 1
- val MANY = 2
-
- var outline = ""
- var success = ""
- var failure = ""
- var warning = ""
- var default = ""
-
- def initialization(number: Int) = number match {
- case MANY =>
- outline = Console.BOLD + Console.BLACK
- success = Console.BOLD + Console.GREEN
- failure = Console.BOLD + Console.RED
- warning = Console.BOLD + Console.YELLOW
- default = Console.RESET
- case SOME =>
- outline = Console.BOLD + Console.BLACK
- success = Console.RESET
- failure = Console.BOLD + Console.BLACK
- warning = Console.BOLD + Console.BLACK
- default = Console.RESET
- case _ =>
- }
-
- def printOutline(msg: String) = print(outline + msg + default)
-
- def printSuccess(msg: String) = print(success + msg + default)
-
- def printFailure(msg: String) = print(failure + msg + default)
-
- def printWarning(msg: String) = print(warning + msg + default)
-}
diff --git a/src/scalap/scala/tools/scalap/Classfile.scala b/src/scalap/scala/tools/scalap/Classfile.scala
index 44f687bd85..c4f273c5aa 100644
--- a/src/scalap/scala/tools/scalap/Classfile.scala
+++ b/src/scalap/scala/tools/scalap/Classfile.scala
@@ -26,6 +26,7 @@ class Classfile(in: ByteArrayReader) {
val fields = readMembers(true)
val methods = readMembers(false)
val attribs = readAttribs
+ def scalaSigAttribute = attribs find (_.toString == Main.SCALA_SIG)
def readAttribs = {
val n = in.nextChar
diff --git a/src/scalap/scala/tools/scalap/Decode.scala b/src/scalap/scala/tools/scalap/Decode.scala
index d859ac5766..e9f9a390c5 100644
--- a/src/scalap/scala/tools/scalap/Decode.scala
+++ b/src/scalap/scala/tools/scalap/Decode.scala
@@ -10,7 +10,9 @@
package scala.tools.scalap
import scala.tools.scalap.scalax.rules.scalasig._
+import scala.tools.nsc.util.ScalaClassLoader
import scala.tools.nsc.util.ScalaClassLoader.getSystemLoader
+import Main.SCALA_SIG
/** Temporary decoder. This would be better off in the scala.tools.nsc
* but right now the compiler won't acknowledge scala.tools.scalap
@@ -23,15 +25,52 @@ object Decode {
case _ => NoSymbol
}
+ /** Return the classfile bytes representing the scala sig attribute.
+ */
+ def scalaSigBytes(name: String): Option[Array[Byte]] = scalaSigBytes(name, getSystemLoader())
+ def scalaSigBytes(name: String, classLoader: ScalaClassLoader): Option[Array[Byte]] = {
+ val bytes = classLoader.findBytesForClassName(name)
+ val reader = new ByteArrayReader(bytes)
+ val cf = new Classfile(reader)
+ cf.scalaSigAttribute map (_.data)
+ }
+
+ /** private[scala] so nobody gets the idea this is a supported interface.
+ */
+ private[scala] def caseParamNames(path: String): Option[List[String]] = {
+ val (outer, inner) = (path indexOf '$') match {
+ case -1 => (path, "")
+ case x => (path take x, path drop (x + 1))
+ }
+
+ for {
+ clazz <- getSystemLoader.tryToLoadClass[AnyRef](outer)
+ ssig <- ScalaSigParser.parse(clazz)
+ }
+ yield {
+ val f: PartialFunction[Symbol, List[String]] =
+ if (inner.isEmpty) {
+ case x: MethodSymbol if x.isCaseAccessor && (x.name endsWith " ") => List(x.name dropRight 1)
+ }
+ else {
+ case x: ClassSymbol if x.name == inner =>
+ val xs = x.children filter (child => child.isCaseAccessor && (child.name endsWith " "))
+ xs.toList map (_.name dropRight 1)
+ }
+
+ (ssig.symbols collect f).flatten toList
+ }
+ }
+
/** Returns a map of Alias -> Type for the given package.
*/
- def typeAliases(pkg: String) = {
+ private[scala] def typeAliases(pkg: String) = {
for {
clazz <- getSystemLoader.tryToLoadClass[AnyRef](pkg + ".package")
ssig <- ScalaSigParser.parse(clazz)
}
yield {
- val typeAliases = ssig.symbols partialMap { case x: AliasSymbol => x }
+ val typeAliases = ssig.symbols collect { case x: AliasSymbol => x }
Map(typeAliases map (x => (x.name, getAliasSymbol(x.infoType).path)): _*)
}
}
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index 59c46df25f..69a91dafce 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -9,10 +9,14 @@
package scala.tools.scalap
-import java.io.{File, PrintStream, OutputStreamWriter, ByteArrayOutputStream}
+import java.io.{PrintStream, OutputStreamWriter, ByteArrayOutputStream}
import scalax.rules.scalasig._
-import tools.nsc.io.AbstractFile
-import tools.nsc.util.{ClassPath, JavaClassPath}
+import scalax.rules.scalasig.ClassFileParser.{ConstValueIndex, Annotation}
+import tools.nsc.util.{ ClassPath }
+import tools.util.PathResolver
+import ClassPath.DefaultJavaContext
+import tools.nsc.io.{PlainFile, AbstractFile}
+import scala.reflect.generic.ByteCodecs
/**The main object used to execute scalap on the command-line.
*
@@ -20,6 +24,9 @@ import tools.nsc.util.{ClassPath, JavaClassPath}
*/
object Main {
val SCALA_SIG = "ScalaSig"
+ val SCALA_SIG_ANNOTATION = "Lscala/reflect/ScalaSignature;"
+ val BYTES_VALUE = "bytes"
+
val versionMsg = "Scala classfile decoder " +
Properties.versionString + " -- " +
Properties.copyrightString + "\n"
@@ -33,7 +40,8 @@ object Main {
*/
def usage {
Console.println("usage: scalap {<option>} <name>")
- Console.println("where <option> is")
+ Console.println("where <name> is fully-qualified class name or <package_name>.package for package objects")
+ Console.println("and <option> is")
Console.println(" -private print private definitions")
Console.println(" -verbose print out additional information")
Console.println(" -version print out the version number of scalap")
@@ -94,17 +102,34 @@ object Main {
baos.toString
}
-
- def decompileScala(bytes: Array[Byte], isPackageObject: Boolean) = {
+ def decompileScala(bytes: Array[Byte], isPackageObject: Boolean): String = {
val byteCode = ByteCode(bytes)
val classFile = ClassFileParser.parse(byteCode)
classFile.attribute(SCALA_SIG).map(_.byteCode).map(ScalaSigAttributeParsers.parse) match {
- case Some(scalaSig) => Console.println(parseScalaSignature(scalaSig, isPackageObject))
- case None => //Do nothing
+ // No entries in ScalaSig attribute implies that the signature is stored in the annotation
+ case Some(ScalaSig(_, _, entries)) if entries.length == 0 => unpickleFromAnnotation(classFile, isPackageObject)
+ case Some(scalaSig) => parseScalaSignature(scalaSig, isPackageObject)
+ case None => ""
+ }
+ }
+
+ def unpickleFromAnnotation(classFile: ClassFile, isPackageObject: Boolean): String = {
+ import classFile._
+ classFile.annotation(SCALA_SIG_ANNOTATION) match {
+ case None => ""
+ case Some(Annotation(_, elements)) =>
+ val bytesElem = elements.find(elem => constant(elem.elementNameIndex) == BYTES_VALUE).get
+ val bytes = ((bytesElem.elementValue match {case ConstValueIndex(index) => constantWrapped(index)})
+ .asInstanceOf[StringBytesPair].bytes)
+ val length = ByteCodecs.decode(bytes)
+ val scalaSig = ScalaSigAttributeParsers.parse(ByteCode(bytes.take(length)))
+ parseScalaSignature(scalaSig, isPackageObject)
}
}
+
+
/**Executes scalap with the given arguments and classpath for the
* class denoted by <code>classname</code>.
*
@@ -125,7 +150,7 @@ object Main {
}
val bytes = cfile.toByteArray
if (isScalaFile(bytes)) {
- decompileScala(bytes, isPackageObjectFile(encName))
+ Console.println(decompileScala(bytes, isPackageObjectFile(encName)))
} else {
// construct a reader for the classfile content
val reader = new ByteArrayReader(cfile.toByteArray)
@@ -262,13 +287,8 @@ object Main {
verbose = arguments contains "-verbose"
printPrivates = arguments contains "-private"
// construct a custom class path
- val path = arguments.getArgument("-classpath") match {
- case None => arguments.getArgument("-cp") match {
- case None => EmptyClasspath
- case Some(path) => new JavaClassPath("", "", path, "", "")
- }
- case Some(path) => new JavaClassPath("", "", path, "", "")
- }
+ def cparg = List("-classpath", "-cp") map (arguments getArgument _) reduceLeft (_ orElse _)
+ val path = cparg map (PathResolver fromPathString _) getOrElse EmptyClasspath
// print the classpath if output is verbose
if (verbose) {
Console.println(Console.BOLD + "CLASSPATH" + Console.RESET + " = " + path)
@@ -279,12 +299,14 @@ object Main {
}
object EmptyClasspath extends ClassPath[AbstractFile] {
- import tools.nsc.util.ClassRep
/**
* The short name of the package (without prefix)
*/
def name: String = ""
- val classes: List[ClassRep[AbstractFile]] = Nil
+ def asURLs = Nil
+ def asClasspathString = ""
+ val context = DefaultJavaContext
+ val classes: List[ClassRep] = Nil
val packages: List[ClassPath[AbstractFile]] = Nil
val sourcepaths: List[AbstractFile] = Nil
}
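To make the control flow of the new decompileScala explicit: an empty ScalaSig attribute means the pickle actually lives in the @ScalaSignature annotation and has to be decoded via ByteCodecs, otherwise the attribute itself is parsed. The sketch below only restates that dispatch using the names introduced in this patch; it adds no functionality of its own.

    import scala.tools.scalap.Main
    import scala.tools.scalap.scalax.rules.scalasig.{ ByteCode, ClassFileParser, ScalaSig, ScalaSigAttributeParsers }

    object DecompileDispatchSketch {
      def signatureOf(bytes: Array[Byte], isPackageObject: Boolean): String = {
        val classFile = ClassFileParser.parse(ByteCode(bytes))
        classFile.attribute(Main.SCALA_SIG).map(_.byteCode).map(ScalaSigAttributeParsers.parse) match {
          case Some(ScalaSig(_, _, entries)) if entries.length == 0 =>
            Main.unpickleFromAnnotation(classFile, isPackageObject)        // annotation path (ByteCodecs.decode)
          case Some(sig) => Main.parseScalaSignature(sig, isPackageObject) // classic attribute path
          case None      => ""                                             // no Scala signature at all
        }
      }
    }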
diff --git a/src/scalap/scala/tools/scalap/Properties.scala b/src/scalap/scala/tools/scalap/Properties.scala
index f433737896..315b81cb3e 100644
--- a/src/scalap/scala/tools/scalap/Properties.scala
+++ b/src/scalap/scala/tools/scalap/Properties.scala
@@ -14,5 +14,4 @@ object Properties extends scala.util.PropertiesTrait
{
protected def propCategory = "decoder"
protected def pickJarBasedOn = classOf[Classfile]
- val cmdName = scala.tools.nsc.Properties.cmdName
}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Functors.scala b/src/scalap/scala/tools/scalap/scalax/rules/Functors.scala
index aa95b48d44..aa852c1e63 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Functors.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/Functors.scala
@@ -60,7 +60,7 @@ trait Functors {
}
}
-/** One of the 'unit' definitions must be overriden in concrete subclasses */
+/** One of the 'unit' definitions must be overridden in concrete subclasses */
trait UnitFunctors extends Units with Functors {
def unit : M[Unit] = unit(())
def unit[A](a : => A) : M[A] = unit map { Unit => a }
@@ -73,7 +73,7 @@ trait Monoidals extends UnitFunctors {
implicit def app[A, B](fab : M[A => B]) = (fa : M[A]) => fa applyTo fab
implicit def appUnit[A, B](a2b : A => B) = app(unit(a2b))
- /** One of 'and' and 'applyTo' definitions must be overriden in concrete subclasses */
+ /** One of 'and' and 'applyTo' definitions must be overridden in concrete subclasses */
trait Monoidal[+A] extends Functor[A] { self : M[A] =>
def and[B](fb : => M[B]) : M[(A, B)] = ((a : A) => (b : B) => (a, b))(this)(fb)
def applyTo[B](fab : M[A => B]) : M[B] = fab and this map { case (f, a) => f(a) }
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala b/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala
index 1324ea695a..827c2dfff7 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/Memoisable.scala
@@ -44,7 +44,7 @@ trait DefaultMemoisable extends Memoisable {
map.getOrElseUpdate(key, compute(key, a)).asInstanceOf[A]
}
- protected def compute[A](key : AnyRef, a : => A) = a match {
+ protected def compute[A](key : AnyRef, a : => A): Any = a match {
case success : Success[_, _] => onSuccess(key, success); success
case other =>
if(DefaultMemoisable.debug) println(key + " -> " + other)
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Result.scala b/src/scalap/scala/tools/scalap/scalax/rules/Result.scala
index 6befbb83c8..17ad4bd053 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Result.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/Result.scala
@@ -42,11 +42,11 @@ case class Success[+Out, +A](out : Out, value : A) extends Result[Out, A, Nothin
def toOption = Some(value)
- def map[B](f : A => B) = Success(out, f(value))
- def mapOut[Out2](f : Out => Out2) = Success(f(out), value)
- def map[Out2, B](f : (Out, A) => (Out2, B)) = f(out, value) match { case (out2, b) => Success(out2, b) }
- def flatMap[Out2, B](f : (Out, A) => Result[Out2, B, Nothing]) = f(out, value)
- def orElse[Out2 >: Out, B >: A](other : => Result[Out2, B, Nothing]) = this
+ def map[B](f : A => B) : Result[Out, B, Nothing] = Success(out, f(value))
+ def mapOut[Out2](f : Out => Out2) : Result[Out2, A, Nothing] = Success(f(out), value)
+ def map[Out2, B](f : (Out, A) => (Out2, B)) : Success[Out2, B] = f(out, value) match { case (out2, b) => Success(out2, b) }
+ def flatMap[Out2, B](f : (Out, A) => Result[Out2, B, Nothing]) : Result[Out2, B, Nothing]= f(out, value)
+ def orElse[Out2 >: Out, B >: A](other : => Result[Out2, B, Nothing]) : Result[Out2, B, Nothing] = this
}
sealed abstract class NoSuccess[+X] extends Result[Nothing, Nothing, X] {
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala b/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
index 4e8ddc8dbe..43f9c20b1d 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala
@@ -98,7 +98,7 @@ trait StateRules {
def nil = unit(Nil)
def none = unit(None)
- /** Create a rule that suceeds if f(in) is true. */
+ /** Create a rule that succeeds if f(in) is true. */
def cond(f : S => Boolean) = get filter f
/** Create a rule that succeeds if all of the given rules succeed.
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala b/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
index 54f2c70bdc..34f52a1e19 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/SeqRule.scala
@@ -24,7 +24,7 @@ class InRule[In, +Out, +A, +X](rule : Rule[In, Out, A, X]) {
in : In => f(rule(in))(in)
}
- /** Creates a rule that suceeds only if the original rule would fail on the given context. */
+ /** Creates a rule that succeeds only if the original rule would fail on the given context. */
def unary_! : Rule[In, In, Unit, Nothing] = mapRule {
case Success(_, _) => in : In => Failure
case _ => in : In => Success(in, ())
@@ -82,7 +82,7 @@ class SeqRule[S, +A, +X](rule : Rule[S, S, A, X]) {
/** Repeats this rule num times */
def times(num : Int) : Rule[S, S, Seq[A], X] = from[S] {
- val result = new collection.mutable.GenericArray[A](num)
+ val result = new collection.mutable.ArraySeq[A](num)
// more compact using HoF but written this way so it's tail-recursive
def rep(i : Int, in : S) : Result[S, Seq[A], X] = {
if (i == num) Success(in, result)
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
index 37bfa9cfea..01652a50b9 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
@@ -9,8 +9,6 @@ import java.io.IOException
import scala._
import scala.Predef._
-import scalax.rules.Error
-
object ByteCode {
def apply(bytes : Array[Byte]) = new ByteCode(bytes, 0, bytes.length)
@@ -62,11 +60,23 @@ class ByteCode(val bytes : Array[Byte], val pos : Int, val length : Int) {
def toInt = fold(0) { (x, b) => (x << 8) + (b & 0xFF)}
def toLong = fold(0L) { (x, b) => (x << 8) + (b & 0xFF)}
- def toUTF8String = io.Codec toUTF8 (bytes drop pos take length) mkString
+ /**
+ * Transforms a subsequence of the current buffer into a UTF-8 String and
+ * stores the array of bytes for the decompiler
+ */
+ def toUTF8StringAndBytes = {
+ val chunk: Array[Byte] = bytes drop pos take length
+ StringBytesPair(io.Codec.toUTF8(chunk).mkString, chunk)
+ }
def byte(i : Int) = bytes(pos) & 0xFF
}
+/**
+ * A wrapper pairing the decoded UTF-8 string with its raw bytes
+ */
+case class StringBytesPair(string: String, bytes: Array[Byte])
+
/** Provides rules for parsing byte-code.
*/
trait ByteCodeReader extends RulesWithState {
@@ -84,6 +94,7 @@ trait ByteCodeReader extends RulesWithState {
object ClassFileParser extends ByteCodeReader {
def parse(byteCode : ByteCode) = expect(classFile)(byteCode)
+ def parseAnnotations(byteCode: ByteCode) = expect(annotations)(byteCode)
val magicNumber = (u4 filter (_ == 0xCAFEBABE)) | error("Not a valid class file")
val version = u2 ~ u2 ^^ { case minor ~ major => (major, minor) }
@@ -91,7 +102,7 @@ object ClassFileParser extends ByteCodeReader {
// NOTE currently most constants just evaluate to a string description
// TODO evaluate to useful values
- val utf8String = (u2 >> bytes) ^^ add1 { raw => pool => raw.toUTF8String }
+ val utf8String = (u2 >> bytes) ^^ add1 { raw => pool => raw.toUTF8StringAndBytes }
val intConstant = u4 ^^ add1 { x => pool => x }
val floatConstant = bytes(4) ^^ add1 { raw => pool => "Float: TODO" }
val longConstant = bytes(8) ^^ add2 { raw => pool => raw.toLong }
@@ -119,9 +130,32 @@ object ClassFileParser extends ByteCodeReader {
val interfaces = u2 >> u2.times
+ // bytes are parameterized by the length, declared in the u4 section
val attribute = u2 ~ (u4 >> bytes) ^~^ Attribute
+ // parse attributes u2 times
val attributes = u2 >> attribute.times
+ // parse runtime-visible annotations
+ abstract class ElementValue
+ case class AnnotationElement(elementNameIndex: Int, elementValue: ElementValue)
+ case class ConstValueIndex(index: Int) extends ElementValue
+ case class EnumConstValue(typeNameIndex: Int, constNameIndex: Int) extends ElementValue
+ case class ClassInfoIndex(index: Int) extends ElementValue
+ case class Annotation(typeIndex: Int, elementValuePairs: Seq[AnnotationElement]) extends ElementValue
+ case class ArrayValue(values: Seq[ElementValue]) extends ElementValue
+
+ def element_value: Parser[ElementValue] = u1 >> {
+ case 'B'|'C'|'D'|'F'|'I'|'J'|'S'|'Z'|'s' => u2 ^^ ConstValueIndex
+ case 'e' => u2 ~ u2 ^~^ EnumConstValue
+ case 'c' => u2 ^^ ClassInfoIndex
+ case '@' => annotation //nested annotation
+ case '[' => u2 >> element_value.times ^^ ArrayValue
+ }
+
+ val element_value_pair = u2 ~ element_value ^~^ AnnotationElement
+ val annotation: Parser[Annotation] = u2 ~ (u2 >> element_value_pair.times) ^~^ Annotation
+ val annotations = u2 >> annotation.times
+
val field = u2 ~ u2 ~ u2 ~ attributes ^~~~^ Field
val fields = u2 >> field.times
@@ -153,9 +187,20 @@ case class ClassFile(
def superClass = constant(header.superClassIndex)
def interfaces = header.interfaces.map(constant)
- def constant(index : Int) = header.constants(index)
+ def constant(index : Int) = header.constants(index) match {
+ case StringBytesPair(str, _) => str
+ case z => z
+ }
+
+ def constantWrapped(index: Int) = header.constants(index)
+
+ def attribute(name : String) = attributes.find {attrib => constant(attrib.nameIndex) == name }
+
+ val RUNTIME_VISIBLE_ANNOTATIONS = "RuntimeVisibleAnnotations"
+ def annotations = (attributes.find(attr => constant(attr.nameIndex) == RUNTIME_VISIBLE_ANNOTATIONS)
+ .map(attr => ClassFileParser.parseAnnotations(attr.byteCode)))
- def attribute(name : String) = attributes.find { attrib => constant(attrib.nameIndex) == name }
+ def annotation(name: String) = annotations.flatMap(seq => seq.find(annot => constant(annot.typeIndex) == name))
}
case class Attribute(nameIndex : Int, byteCode : ByteCode)
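The annotation rules added above follow the element_value encoding of the JVM class-file format. As a cross-check, the same layout can be walked with a plain DataInputStream; the sketch is illustrative and deliberately ignores constant-pool resolution.

    import java.io.DataInputStream

    object AnnotationWalkSketch {
      // element_value: a one-byte tag selects the layout that follows.
      def readElementValue(in: DataInputStream): Unit = in.readUnsignedByte().toChar match {
        case 'B' | 'C' | 'D' | 'F' | 'I' | 'J' | 'S' | 'Z' | 's' =>
          in.readUnsignedShort()                         // const_value_index
        case 'e' =>
          in.readUnsignedShort(); in.readUnsignedShort() // enum: type_name_index, const_name_index
        case 'c' =>
          in.readUnsignedShort()                         // class_info_index
        case '@' =>
          readAnnotation(in)                             // nested annotation
        case '[' =>
          val n = in.readUnsignedShort()                 // num_values, then that many element_values
          (1 to n) foreach (_ => readElementValue(in))
        case other =>
          throw new RuntimeException("unexpected element_value tag: " + other)
      }

      // annotation: type_index, then num_element_value_pairs of (name_index, element_value).
      def readAnnotation(in: DataInputStream): Unit = {
        in.readUnsignedShort()
        val pairs = in.readUnsignedShort()
        (1 to pairs) foreach { _ =>
          in.readUnsignedShort()
          readElementValue(in)
        }
      }
    }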
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
index a97494ed6d..e0f95c8bbb 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
@@ -55,7 +55,7 @@ object ScalaSigAttributeParsers extends ByteCodeReader {
val symtab = nat >> entry.times
val scalaSig = nat ~ nat ~ symtab ^~~^ ScalaSig
- val utf8 = read(_ toUTF8String)
+ val utf8 = read(x => x.toUTF8StringAndBytes.string)
val longValue = read(_ toLong)
}
@@ -164,21 +164,21 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
* | 5 ALIASsym len_Nat SymbolInfo
* | 6 CLASSsym len_Nat SymbolInfo [thistype_Ref]
* | 7 MODULEsym len_Nat SymbolInfo
- * | 8 VALsym len_Nat [defaultGetter_Ref] SymbolInfo [alias_Ref]
+ * | 8 VALsym len_Nat [defaultGetter_Ref /* no longer needed*/] SymbolInfo [alias_Ref]
* | 9 EXTref len_Nat name_Ref [owner_Ref]
* | 10 EXTMODCLASSref len_Nat name_Ref [owner_Ref]
* | 11 NOtpe len_Nat
* | 12 NOPREFIXtpe len_Nat
* | 13 THIStpe len_Nat sym_Ref
* | 14 SINGLEtpe len_Nat type_Ref sym_Ref
- * | 15 CONSTANTtpe len_Nat type_Ref constant_Ref
+ * | 15 CONSTANTtpe len_Nat constant_Ref
* | 16 TYPEREFtpe len_Nat type_Ref sym_Ref {targ_Ref}
* | 17 TYPEBOUNDStpe len_Nat tpe_Ref tpe_Ref
* | 18 REFINEDtpe len_Nat classsym_Ref {tpe_Ref}
* | 19 CLASSINFOtpe len_Nat classsym_Ref {tpe_Ref}
* | 20 METHODtpe len_Nat tpe_Ref {sym_Ref}
* | 21 POLYTtpe len_Nat tpe_Ref {sym_Ref}
- * | 22 IMPLICITMETHODtpe len_Nat tpe_Ref {tpe_Ref}
+ * | 22 IMPLICITMETHODtpe len_Nat tpe_Ref {sym_Ref} /* no longer needed */
* | 52 SUPERtpe len_Nat tpe_Ref tpe_Ref
* | 24 LITERALunit len_Nat
* | 25 LITERALboolean len_Nat value_Long
@@ -195,13 +195,12 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
* | 36 LITERALenum len_Nat sym_Ref
* | 40 SYMANNOT len_Nat sym_Ref AnnotInfoBody
* | 41 CHILDREN len_Nat sym_Ref {sym_Ref}
- * | 42 ANNOTATEDtpe len_Nat [sym_Ref] tpe_Ref {annotinfo_Ref}
+ * | 42 ANNOTATEDtpe len_Nat [sym_Ref /* no longer needed */] tpe_Ref {annotinfo_Ref}
* | 43 ANNOTINFO len_Nat AnnotInfoBody
* | 44 ANNOTARGARRAY len_Nat {constAnnotArg_Ref}
* | 47 DEBRUIJNINDEXtpe len_Nat level_Nat index_Nat
* | 48 EXISTENTIALtpe len_Nat type_Ref {symbol_Ref}
*/
-
val noSymbol = 3 -^ NoSymbol
val typeSymbol = symbolEntry(4) ^^ TypeSymbol as "typeSymbol"
val aliasSymbol = symbolEntry(5) ^^ AliasSymbol as "alias"
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
index da268f4e44..26b01634f5 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
@@ -16,6 +16,7 @@ import java.io.{PrintStream, ByteArrayOutputStream}
import java.util.regex.Pattern
import scala.tools.scalap.scalax.util.StringUtil
+import reflect.NameTransformer
class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
import stream._
@@ -24,13 +25,24 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
case class TypeFlags(printRep: Boolean)
- def printSymbol(symbol: Symbol) { printSymbol(0, symbol) }
+ def printSymbol(symbol: Symbol) {printSymbol(0, symbol)}
+
+ def printSymbolAttributes(s: Symbol, onNewLine: Boolean, indent: => Unit) = s match {
+ case t: SymbolInfoSymbol => {
+ for (a <- t.attributes) {
+ indent; print(toString(a))
+ if (onNewLine) print("\n") else print(" ")
+ }
+ }
+ case _ =>
+ }
def printSymbol(level: Int, symbol: Symbol) {
if (!symbol.isLocal &&
- !(symbol.isPrivate && !printPrivates)) {
+ !(symbol.isPrivate && !printPrivates)) {
def indent() {for (i <- 1 to level) print(" ")}
+ printSymbolAttributes(symbol, true, indent)
symbol match {
case o: ObjectSymbol =>
if (!isCaseClassObject(o)) {
@@ -50,8 +62,9 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
case a: AliasSymbol =>
indent
printAlias(level, a)
- case t: TypeSymbol =>
- ()
+ case t: TypeSymbol if !t.isParam && !t.name.matches("_\\$\\d+")=>
+ indent
+ printTypeSymbol(level, t)
case s =>
}
}
@@ -82,11 +95,20 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
}
def printModifiers(symbol: Symbol) {
+ // print private access modifier
+ if (symbol.isPrivate) print("private ")
+ else if (symbol.isProtected) print("protected ")
+ else symbol match {
+ case sym: SymbolInfoSymbol => sym.symbolInfo.privateWithin match {
+ case Some(t: Symbol) => print("private[" + t.name +"] ")
+ case _ =>
+ }
+ case _ =>
+ }
+
if (symbol.isSealed) print("sealed ")
if (symbol.isImplicit) print("implicit ")
if (symbol.isFinal && !symbol.isInstanceOf[ObjectSymbol]) print("final ")
- if (symbol.isPrivate) print("private ")
- else if (symbol.isProtected) print("protected ")
if (symbol.isOverride) print("override ")
if (symbol.isAbstract) symbol match {
case c@(_: ClassSymbol | _: ObjectSymbol) if !c.isTrait => print("abstract ")
@@ -98,30 +120,34 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
private def refinementClass(c: ClassSymbol) = c.name == "<refinement>"
def printClass(level: Int, c: ClassSymbol) {
- printModifiers(c)
- val defaultConstructor = if (c.isCase) getPrinterByConstructor(c) else ""
- if (c.isTrait) print("trait ") else print("class ")
- print(processName(c.name))
- val it = c.infoType
- val classType = it match {
- case PolyType(typeRef, symbols) => PolyTypeWithCons(typeRef, symbols, defaultConstructor)
- case _ => it
- }
- printType(classType)
- print(" {")
- //Print class selftype
- c.selfType match {
- case Some(t: Type) => print("\n"); print(" this : " + toString(t) + " =>")
- case None =>
+ if (c.name == "<local child>" /*scala.tools.nsc.symtab.StdNames.LOCALCHILD.toString()*/ ) {
+ print("\n")
+ } else {
+ printModifiers(c)
+ val defaultConstructor = if (c.isCase) getPrinterByConstructor(c) else ""
+ if (c.isTrait) print("trait ") else print("class ")
+ print(processName(c.name))
+ val it = c.infoType
+ val classType = it match {
+ case PolyType(typeRef, symbols) => PolyTypeWithCons(typeRef, symbols, defaultConstructor)
+ case _ => it
+ }
+ printType(classType)
+ print(" {")
+ //Print class selftype
+ c.selfType match {
+ case Some(t: Type) => print("\n"); print(" this : " + toString(t) + " =>")
+ case None =>
+ }
+ print("\n")
+ printChildren(level, c)
+ printWithIndent(level, "}\n")
}
- print("\n")
- printChildren(level, c)
- printWithIndent(level, "}\n")
}
def getPrinterByConstructor(c: ClassSymbol) = {
- c.children.find{
- case m : MethodSymbol if m.name == CONSTRUCTOR_NAME => true
+ c.children.find {
+ case m: MethodSymbol if m.name == CONSTRUCTOR_NAME => true
case _ => false
} match {
case Some(m: MethodSymbol) =>
@@ -170,7 +196,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
if (res.length > 1) StringUtil.decapitalize(res.substring(0, 1)) else res.toLowerCase
})
- def printMethodType(t: Type, printResult: Boolean)(implicit cont : => Unit): Unit = {
+ def printMethodType(t: Type, printResult: Boolean)(cont: => Unit): Unit = {
def _pmt(mt: Type {def resultType: Type; def paramSymbols: Seq[Symbol]}) = {
@@ -179,9 +205,9 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
case _ => "^___^"
})
- // Printe parameter clauses
+ // Print parameter clauses
print(paramEntries.mkString(
- "(" + (mt match {case _ : ImplicitMethodType => "implicit "; case _ => ""})
+ "(" + (mt match {case _: ImplicitMethodType => "implicit "; case _ => ""})
, ", ", ")"))
// Print result type
@@ -215,13 +241,14 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
val n = m.name
if (underCaseClass(m) && n == CONSTRUCTOR_NAME) return
+ if (n.matches(".+\\$default\\$\\d+")) return // skip default function parameters
if (n.startsWith("super$")) return // do not print auxiliary qualified super accessors
if (m.isAccessor && n.endsWith("_$eq")) return
indent()
printModifiers(m)
if (m.isAccessor) {
val indexOfSetter = m.parent.get.children.indexWhere(x => x.isInstanceOf[MethodSymbol] &&
- x.asInstanceOf[MethodSymbol].name == n + "_$eq")
+ x.asInstanceOf[MethodSymbol].name == n + "_$eq")
print(if (indexOfSetter > 0) "var " else "val ")
} else {
print("def ")
@@ -234,7 +261,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
val nn = processName(name)
print(nn)
printMethodType(m.infoType, true)(
- {if (!m.isDeferred) print(" = { /* compiled code */ }" /* Print body only for non-abstract metods */ )}
+ {if (!m.isDeferred) print(" = { /* compiled code */ }" /* Print body only for non-abstract methods */ )}
)
}
print("\n")
@@ -248,35 +275,43 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
printChildren(level, a)
}
- def printAttributes(sym: SymbolInfoSymbol) {
- for (attrib <- sym.attributes) printAttribute(attrib)
+ def printTypeSymbol(level: Int, t: TypeSymbol) {
+ print("type ")
+ print(processName(t.name))
+ printType(t.infoType)
+ print("\n")
}
- def printAttribute(attrib: AttributeInfo) {
- printType(attrib.typeRef, "@")
+ def toString(attrib: AttributeInfo): String = {
+ val buffer = new StringBuffer
+ buffer.append(toString(attrib.typeRef, "@"))
if (attrib.value.isDefined) {
- print("(")
- printValue(attrib.value.get)
- print(")")
+ buffer.append("(")
+ val value = attrib.value.get
+ val stringVal = value.isInstanceOf[String]
+ if (stringVal) buffer.append("\"")
+ buffer.append(valueToString(value))
+ if (stringVal) buffer.append("\"")
+ buffer.append(")")
}
if (!attrib.values.isEmpty) {
- print(" {")
+ buffer.append(" {")
for (name ~ value <- attrib.values) {
- print(" val ")
- print(processName(name))
- print(" = ")
- printValue(value)
+ buffer.append(" val ")
+ buffer.append(processName(name))
+ buffer.append(" = ")
+ buffer.append(valueToString(value))
}
- printValue(attrib.value)
- print(" }")
+ buffer.append(valueToString(attrib.value))
+ buffer.append(" }")
}
- print(" ")
+ buffer.toString
}
- def printValue(value: Any): Unit = value match {
- case t: Type => printType(t)
+ def valueToString(value: Any): String = value match {
+ case t: Type => toString(t)
// TODO string, char, float, etc.
- case _ => print(value)
+ case _ => value.toString
}
implicit object _tf extends TypeFlags(false)
@@ -289,57 +324,72 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
def toString(t: Type)(implicit flags: TypeFlags): String = toString(t, "")(flags)
- def toString(t: Type, sep: String)(implicit flags: TypeFlags): String = t match {
- case ThisType(symbol) => sep + symbol.path + ".type"
- case SingleType(typeRef, symbol) => sep + symbol.path + ".type"
- case ConstantType(constant) => sep + (constant match {
- case null => "scala.Null"
- case _: Unit => "scala.Unit"
- case _: Boolean => "scala.Boolean"
- case _: Byte => "scala.Byte"
- case _: Char => "scala.Char"
- case _: Short => "scala.Short"
- case _: Int => "scala.Int"
- case _: Long => "scala.Long"
- case _: Float => "scala.Float"
- case _: Double => "scala.Double"
- case _: String => "java.lang.String"
- case c: Class[_] => "java.lang.Class[" + c.getComponentType.getCanonicalName.replace("$", ".") + "]"
- })
- case TypeRefType(prefix, symbol, typeArgs) => sep + (symbol.path match {
- case "scala.<repeated>" => flags match {
- case TypeFlags(true) => toString(typeArgs.head) + "*"
- case _ => "scala.Seq" + typeArgString(typeArgs)
+ def toString(t: Type, sep: String)(implicit flags: TypeFlags): String = {
+ // print type itself
+ t match {
+ case ThisType(symbol) => sep + processName(symbol.path) + ".type"
+ case SingleType(typeRef, symbol) => sep + processName(symbol.path) + ".type"
+ case ConstantType(constant) => sep + (constant match {
+ case null => "scala.Null"
+ case _: Unit => "scala.Unit"
+ case _: Boolean => "scala.Boolean"
+ case _: Byte => "scala.Byte"
+ case _: Char => "scala.Char"
+ case _: Short => "scala.Short"
+ case _: Int => "scala.Int"
+ case _: Long => "scala.Long"
+ case _: Float => "scala.Float"
+ case _: Double => "scala.Double"
+ case _: String => "java.lang.String"
+ case c: Class[_] => "java.lang.Class[" + c.getComponentType.getCanonicalName.replace("$", ".") + "]"
+ })
+ case TypeRefType(prefix, symbol, typeArgs) => sep + (symbol.path match {
+ case "scala.<repeated>" => flags match {
+ case TypeFlags(true) => toString(typeArgs.head) + "*"
+ case _ => "scala.Seq" + typeArgString(typeArgs)
+ }
+ case "scala.<byname>" => "=> " + toString(typeArgs.head)
+ case _ => {
+ val path = StringUtil.cutSubstring(symbol.path)(".package") //remove package object reference
+ StringUtil.trimStart(processName(path) + typeArgString(typeArgs), "<empty>.")
+ }
+ })
+ case TypeBoundsType(lower, upper) => {
+ val lb = toString(lower)
+ val ub = toString(upper)
+ val lbs = if (!lb.equals("scala.Nothing")) " >: " + lb else ""
+ val ubs = if (!ub.equals("scala.Any")) " <: " + ub else ""
+ lbs + ubs
}
- case "scala.<byname>" => "=> " + toString(typeArgs.head)
- case _ => {
- val path = StringUtil.cutSubstring(symbol.path)(".package") //remove package object reference
- StringUtil.trimStart(processName(path) + typeArgString(typeArgs), "<empty>.")
+ case RefinedType(classSym, typeRefs) => sep + typeRefs.map(toString).mkString("", " with ", "")
+ case ClassInfoType(symbol, typeRefs) => sep + typeRefs.map(toString).mkString(" extends ", " with ", "")
+
+ case ImplicitMethodType(resultType, _) => toString(resultType, sep)
+ case MethodType(resultType, _) => toString(resultType, sep)
+
+ case PolyType(typeRef, symbols) => typeParamString(symbols) + toString(typeRef, sep)
+ case PolyTypeWithCons(typeRef, symbols, cons) => typeParamString(symbols) + processName(cons) + toString(typeRef, sep)
+ case AnnotatedType(typeRef, attribTreeRefs) => {
+ toString(typeRef, sep)
}
- })
- case TypeBoundsType(lower, upper) => " >: " + toString(lower) + " <: " + toString(upper)
- case RefinedType(classSym, typeRefs) => sep + typeRefs.map(toString).mkString("", " with ", "")
- case ClassInfoType(symbol, typeRefs) => sep + typeRefs.map(toString).mkString(" extends ", " with ", "")
-
- case ImplicitMethodType(resultType, _) => toString(resultType, sep)
- case MethodType(resultType, _) => toString(resultType, sep)
-
- case PolyType(typeRef, symbols) => typeParamString(symbols) + toString(typeRef, sep)
- case PolyTypeWithCons(typeRef, symbols, cons) => typeParamString(symbols) + cons + toString(typeRef, sep)
- case AnnotatedType(typeRef, attribTreeRefs) => toString(typeRef, sep)
- case AnnotatedWithSelfType(typeRef, symbol, attribTreeRefs) => toString(typeRef, sep)
- //case DeBruijnIndexType(typeLevel, typeIndex) =>
- case ExistentialType(typeRef, symbols) => {
- val refs = symbols.map(toString _).filter(!_.startsWith("_ ")).map("type " + _)
- toString(typeRef, sep) + (if (refs.size > 0) refs.mkString(" forSome {", "; ", "}") else "")
+ case AnnotatedWithSelfType(typeRef, symbol, attribTreeRefs) => toString(typeRef, sep)
+ //case DeBruijnIndexType(typeLevel, typeIndex) =>
+ case ExistentialType(typeRef, symbols) => {
+ val refs = symbols.map(toString _).filter(!_.startsWith("_")).map("type " + _)
+ toString(typeRef, sep) + (if (refs.size > 0) refs.mkString(" forSome {", "; ", "}") else "")
+ }
+ case _ => sep + t.toString
}
- case _ => sep + t.toString
}
def getVariance(t: TypeSymbol) = if (t.isCovariant) "+" else if (t.isContravariant) "-" else ""
def toString(symbol: Symbol): String = symbol match {
- case symbol: TypeSymbol => getVariance(symbol) + processName(symbol.name) + toString(symbol.infoType)
+ case symbol: TypeSymbol => {
+ val attrs = (for (a <- symbol.attributes) yield toString(a)).mkString(" ")
+ val atrs = if (attrs.length > 0) attrs.trim + " " else ""
+ atrs + getVariance(symbol) + processName(symbol.name) + toString(symbol.infoType)
+ }
case s => symbol.toString
}
@@ -356,19 +406,27 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
"\\$minus" -> "-", "\\$eq" -> "=", "\\$less" -> "<",
"\\$times" -> "*", "\\$div" -> "/", "\\$bslash" -> "\\\\",
"\\$greater" -> ">", "\\$qmark" -> "?", "\\$percent" -> "%",
- "\\$amp" -> "&", "\\$colon" -> ":", "\\$u2192" -> "→")
- val pattern = Pattern.compile(_syms.keysIterator.foldLeft("")((x, y) => if (x == "") y else x + "|" + y))
+ "\\$amp" -> "&", "\\$colon" -> ":", "\\$u2192" -> "→",
+ "\\$hash" -> "#")
+ val pattern = Pattern.compile(_syms.keys.foldLeft("")((x, y) => if (x == "") y else x + "|" + y))
val placeholderPattern = "_\\$(\\d)+"
+ private def stripPrivatePrefix(name: String) = {
+ val i = name.lastIndexOf("$$")
+ if (i > 0) name.substring(i + 2) else name
+ }
+
def processName(name: String) = {
- val m = pattern.matcher(name)
- var temp = name
+ val stripped = stripPrivatePrefix(name)
+ val m = pattern.matcher(stripped)
+ var temp = stripped
while (m.find) {
val key = m.group
val re = "\\" + key
temp = temp.replaceAll(re, _syms(re))
}
- temp.replaceAll(placeholderPattern, "_")
+ val result = temp.replaceAll(placeholderPattern, "_")
+ NameTransformer.decode(result)
}
}
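The reworked processName first strips any private-name prefix up to the last "$$" and now finishes by running the result through NameTransformer.decode. A tiny illustrative sketch follows; the sample names are made up.

    import scala.reflect.NameTransformer

    object ProcessNameSketch {
      // Same idea as the new stripPrivatePrefix: keep only what follows the last "$$".
      def stripPrivatePrefix(name: String): String = {
        val i = name.lastIndexOf("$$")
        if (i > 0) name.substring(i + 2) else name
      }

      def main(args: Array[String]) {
        println(NameTransformer.decode("$colon$colon"))    // ::
        println(NameTransformer.decode("$plus$eq"))        // +=
        println(stripPrivatePrefix("Outer$$secretField"))  // secretField (hypothetical mangled name)
      }
    }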
diff --git a/src/swing/scala/swing/AbstractButton.scala b/src/swing/scala/swing/AbstractButton.scala
index 9335eaf706..5de0283e22 100644
--- a/src/swing/scala/swing/AbstractButton.scala
+++ b/src/swing/scala/swing/AbstractButton.scala
@@ -20,7 +20,7 @@ import javax.swing.{AbstractButton => JAbstractButton, Icon}
*
* @see javax.swing.AbstractButton
*/
-abstract class AbstractButton extends Component with Action.Trigger with Publisher {
+abstract class AbstractButton extends Component with Action.Trigger.Wrapper with Publisher {
override lazy val peer: JAbstractButton = new JAbstractButton with SuperMixin {}
def text: String = peer.getText
@@ -41,14 +41,6 @@ abstract class AbstractButton extends Component with Action.Trigger with Publish
def rolloverSelectedIcon: Icon = peer.getRolloverSelectedIcon
def rolloverSelectedIcon_=(b: Icon) = peer.setRolloverSelectedIcon(b)
- // TODO: we need an action cache
- private var _action: Action = Action.NoAction
- def action: Action = _action
- def action_=(a: Action) { _action = a; peer.setAction(a.peer) }
-
- //1.6: def hideActionText: Boolean = peer.getHideActionText
- //def hideActionText_=(b: Boolean) = peer.setHideActionText(b)
-
peer.addActionListener(Swing.ActionListener { e =>
publish(ButtonClicked(AbstractButton.this))
})
diff --git a/src/swing/scala/swing/Action.scala b/src/swing/scala/swing/Action.scala
index 7e0bd9286d..613038abe2 100644
--- a/src/swing/scala/swing/Action.scala
+++ b/src/swing/scala/swing/Action.scala
@@ -29,9 +29,17 @@ object Action {
def peer: javax.swing.JComponent {
def addActionListener(a: ActionListener)
def removeActionListener(a: ActionListener)
- def setAction(a: Action): javax.swing.Action
- def getAction: javax.swing.Action
+ def setAction(a: javax.swing.Action)
+ def getAction(): javax.swing.Action
}
+
+ // TODO: we need an action cache
+ private var _action: Action = Action.NoAction
+ def action: Action = _action
+ def action_=(a: Action) { _action = a; peer.setAction(a.peer) }
+
+ //1.6: def hideActionText: Boolean = peer.getHideActionText
+ //def hideActionText_=(b: Boolean) = peer.setHideActionText(b)
}
}
@@ -128,7 +136,7 @@ abstract class Action(title0: String) {
def accelerator: Option[KeyStroke] =
toOption(peer.getValue(javax.swing.Action.ACCELERATOR_KEY))
def accelerator_=(k: Option[KeyStroke]) {
- peer.putValue(javax.swing.Action.ACCELERATOR_KEY, toNull(k))
+ peer.putValue(javax.swing.Action.ACCELERATOR_KEY, k orNull)
}
/**
@@ -140,7 +148,7 @@ abstract class Action(title0: String) {
/*/**
* Only honored if not <code>None</code>. For various buttons.
*/
- 1.6: def selected: Option[Boolean] = toOption(peer.getValue(javax.swing.Action.SELECTED_KEY))
+ 1.6: def selected: Option[Boolean] = Option(peer.getValue(javax.swing.Action.SELECTED_KEY))
def selected_=(b: Option[Boolean]) {
peer.putValue(javax.swing.Action.SELECTED_KEY,
if (b == None) null else new java.lang.Boolean(b.get))
diff --git a/src/swing/scala/swing/BorderPanel.scala b/src/swing/scala/swing/BorderPanel.scala
index 2acc56787b..01fe873f93 100644
--- a/src/swing/scala/swing/BorderPanel.scala
+++ b/src/swing/scala/swing/BorderPanel.scala
@@ -51,5 +51,11 @@ class BorderPanel extends Panel with LayoutContainer {
wrapPosition(layoutManager.getConstraints(comp.peer).asInstanceOf[String])
protected def areValid(c: Constraints): (Boolean, String) = (true, "")
- protected def add(c: Component, l: Constraints) { peer.add(c.peer, l.toString) }
+ protected def add(c: Component, l: Constraints) {
+ // we need to remove previous components with the same constraints as the new one,
+ // otherwise the layout manager loses track of the old one
+ val old = layoutManager.getLayoutComponent(l.toString)
+ if(old != null) peer.remove(old)
+ peer.add(c.peer, l.toString)
+ }
}
diff --git a/src/swing/scala/swing/BufferWrapper.scala b/src/swing/scala/swing/BufferWrapper.scala
index 480ac59950..eac53c5d77 100644
--- a/src/swing/scala/swing/BufferWrapper.scala
+++ b/src/swing/scala/swing/BufferWrapper.scala
@@ -12,7 +12,6 @@
package scala.swing
import scala.collection.mutable.Buffer
-import scala.collection.IndexedSeq
/**
* Default partial implementation for buffer adapters.
diff --git a/src/swing/scala/swing/ComboBox.scala b/src/swing/scala/swing/ComboBox.scala
index 21d33acd32..fabbcf2d35 100644
--- a/src/swing/scala/swing/ComboBox.scala
+++ b/src/swing/scala/swing/ComboBox.scala
@@ -15,7 +15,6 @@ import event._
import javax.swing.{JList, JComponent, JComboBox, JTextField, ComboBoxModel, AbstractListModel, ListCellRenderer}
import java.awt.event.ActionListener
-
object ComboBox {
/**
* An editor for a combo box. Let's you edit the currently selected item.
@@ -205,8 +204,8 @@ class ComboBox[A](items: Seq[A]) extends Component with Publisher {
peer.setEditor(editor(this).comboBoxPeer)
}
- def prototypeDisplayValue: Option[A] = Swing.toOption(peer.getPrototypeDisplayValue)
+ def prototypeDisplayValue: Option[A] = toOption[A](peer.getPrototypeDisplayValue)
def prototypeDisplayValue_=(v: Option[A]) {
- peer.setPrototypeDisplayValue(Swing.toNull(v.map(_.asInstanceOf[AnyRef])))
+ peer.setPrototypeDisplayValue(v map toAnyRef orNull)
}
}
diff --git a/src/swing/scala/swing/Component.scala b/src/swing/scala/swing/Component.scala
index ebf6c2dda4..1f1a646db5 100644
--- a/src/swing/scala/swing/Component.scala
+++ b/src/swing/scala/swing/Component.scala
@@ -79,6 +79,9 @@ abstract class Component extends UIElement {
}
}
+ def name: String = peer.getName
+ def name_=(s: String) = peer.setName(s)
+
/**
* Used by certain layout managers, e.g., BoxLayout or OverlayLayout to
* align components relative to each other.
@@ -198,7 +201,7 @@ abstract class Component extends UIElement {
def requestFocusInWindow() = peer.requestFocusInWindow()
def hasFocus: Boolean = peer.isFocusOwner
- override def onFirstSubscribe {
+ protected override def onFirstSubscribe {
super.onFirstSubscribe
// TODO: deprecated, remove after 2.8
peer.addComponentListener(new java.awt.event.ComponentListener {
@@ -234,8 +237,8 @@ abstract class Component extends UIElement {
def propertyChange(e: java.beans.PropertyChangeEvent) {
e.getPropertyName match {
case "font" => publish(FontChanged(Component.this))
- case "background" => publish(ForegroundChanged(Component.this))
- case "foreground" => publish(BackgroundChanged(Component.this))
+ case "background" => publish(BackgroundChanged(Component.this))
+ case "foreground" => publish(ForegroundChanged(Component.this))
case _ =>
/*case "focusable" =>
case "focusTraversalKeysEnabled" =>
diff --git a/src/swing/scala/swing/Font.scala b/src/swing/scala/swing/Font.scala
new file mode 100644
index 0000000000..a58c8967d7
--- /dev/null
+++ b/src/swing/scala/swing/Font.scala
@@ -0,0 +1,70 @@
+package scala.swing
+
+/*object Font {
+ def apply(fontFormat: Int, fontFile: java.io.File) = java.awt.Font.createFont(fontFormat, fontFile)
+ def apply(fontFormat: Int, fontStream: java.io.InputStream) = java.awt.Font.createFont(fontFormat, fontStream)
+ def decode(str: String) = java.awt.Font.decode(str)
+
+ /* TODO: finish implementation
+ /**
+ * See [java.awt.Font.getFont].
+ */
+ def get(attributes: Map[_ <: java.text.AttributedCharacterIterator.Attribute, _]) =
+ java.awt.Font.getFont(ImmutableMapWrapper(attributes))
+
+ import java.{util => ju}
+ private case class ImmutableMapWrapper[A, B](underlying : Map[A, B])(m : ClassManifest[A]) extends ju.AbstractMap[A, B] {
+ self =>
+ override def size = underlying.size
+
+ override def put(k : A, v : B) =
+ throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
+ override def remove(k : AnyRef) =
+ throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
+
+ override def entrySet : ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
+ def size = self.size
+
+ def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
+ val ui = underlying.iterator
+ var prev : Option[A] = None
+
+ def hasNext = ui.hasNext
+
+ def next = {
+ val (k, v) = ui.next
+ prev = Some(k)
+ new ju.Map.Entry[A, B] {
+ def getKey = k
+ def getValue = v
+ def setValue(v1 : B) = self.put(k, v1)
+ override def equals(other : Any) = other match {
+ case e : ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
+ case _ => false
+ }
+ }
+ }
+
+ def remove = prev match {
+ case Some(k) => val v = self.remove(k.asInstanceOf[AnyRef]) ; prev = None ; v
+ case _ => throw new IllegalStateException("next must be called at least once before remove")
+ }
+ }
+ }
+ }
+ */
+
+ /**
+ * See [java.awt.Font.getFont].
+ */
+ def get(nm: String) = java.awt.Font.getFont(nm)
+ /**
+ * See [java.awt.Font.getFont].
+ */
+ def get(nm: String, font: Font) = java.awt.Font.getFont(nm, font)
+
+ def Insets(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
+ def Rectangle(x: Int, y: Int, width: Int, height: Int) = new Rectangle(x, y, width, height)
+ def Point(x: Int, y: Int) = new Point(x, y)
+ def Dimension(x: Int, y: Int) = new Dimension(x, y)
+}*/ \ No newline at end of file
diff --git a/src/swing/scala/swing/LayoutContainer.scala b/src/swing/scala/swing/LayoutContainer.scala
index 20fb3a7cd8..8e94bf565f 100644
--- a/src/swing/scala/swing/LayoutContainer.scala
+++ b/src/swing/scala/swing/LayoutContainer.scala
@@ -42,7 +42,10 @@ trait LayoutContainer extends Container.Wrapper {
protected def areValid(c: Constraints): (Boolean, String)
/**
* Adds a component with the given constraints to the underlying layout
- * manager and the component peer.
+ * manager and the component peer. This method needs to interact properly
+ * with method `constraintsFor`, i.e., it might need to remove previously
+ * held components in order to maintain layout consistency. See `BorderPanel`
+ * for an example.
*/
protected def add(comp: Component, c: Constraints)
@@ -53,7 +56,7 @@ trait LayoutContainer extends Container.Wrapper {
*
* layout(myComponent) = myConstraints
*
- * also ensures that myComponent is properly add to this container.
+ * also ensures that myComponent is properly added to this container.
*/
def layout: Map[Component, Constraints] = new Map[Component, Constraints] {
def -= (c: Component): this.type = { _contents -= c; this }
@@ -64,7 +67,7 @@ trait LayoutContainer extends Container.Wrapper {
add(c, l)
this
}
- def get(c: Component) = Swing.toOption(constraintsFor(c))
+ def get(c: Component) = Option(constraintsFor(c))
override def size = peer.getComponentCount
def iterator: Iterator[(Component, Constraints)] =
peer.getComponents.iterator.map { c =>
diff --git a/src/swing/scala/swing/ListView.scala b/src/swing/scala/swing/ListView.scala
index c0e99412c1..a15471796c 100644
--- a/src/swing/scala/swing/ListView.scala
+++ b/src/swing/scala/swing/ListView.scala
@@ -157,12 +157,12 @@ class ListView[A] extends Component {
def listData: Seq[A] = peer.getModel match {
case model: ModelWrapper => model.items
- case model @ _ => new Seq[A] {
+ case model @ _ => new Seq[A] { selfSeq =>
def length = model.getSize
def iterator = new Iterator[A] {
var idx = 0
def next = { idx += 1; apply(idx-1) }
- def hasNext = idx < length
+ def hasNext = idx < selfSeq.length
}
def apply(n: Int) = model.getElementAt(n).asInstanceOf[A]
}
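The `selfSeq` alias matters because Iterator defines its own (consuming) `length`, so the unqualified call inside `hasNext` no longer referred to the wrapping Seq. A self-contained sketch of the same idiom outside ListView:

    val wrapped = new Seq[Int] { outer =>
      def length = 3
      def apply(i: Int) = i * 10
      def iterator = new Iterator[Int] {
        var idx = 0
        def hasNext = idx < outer.length          // the Seq's length, not Iterator.length
        def next() = { idx += 1; apply(idx - 1) } // resolves to the enclosing Seq's apply
      }
    }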
diff --git a/src/swing/scala/swing/Publisher.scala b/src/swing/scala/swing/Publisher.scala
index cf33dca60a..cf60293ada 100644
--- a/src/swing/scala/swing/Publisher.scala
+++ b/src/swing/scala/swing/Publisher.scala
@@ -52,11 +52,15 @@ trait Publisher extends Reactor {
listenTo(this)
}
+/**
+ * A publisher that subscribes itself to an underlying event source only when the first
+ * reaction is installed, and can unsubscribe itself when the last reaction is uninstalled.
+ */
private[swing] trait LazyPublisher extends Publisher {
import Reactions._
- def onFirstSubscribe()
- def onLastUnsubscribe()
+ protected def onFirstSubscribe()
+ protected def onLastUnsubscribe()
override def subscribe(listener: Reaction) {
if(listeners.size == 1) onFirstSubscribe()
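The newly protected hooks implement a lazy-subscription pattern: expensive peer listeners are attached only once somebody actually reacts, and can be detached again when the last reaction goes away. A generic sketch of that pattern (hypothetical names, not the private trait itself):

    trait LazyAttach {
      private var subscribers = 0
      protected def attach(): Unit   // e.g. peer.addComponentListener(...)
      protected def detach(): Unit
      def subscribe()   { subscribers += 1; if (subscribers == 1) attach() }
      def unsubscribe() { subscribers -= 1; if (subscribers == 0) detach() }
    }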
diff --git a/src/swing/scala/swing/RichWindow.scala b/src/swing/scala/swing/RichWindow.scala
index 555988b87b..a28794c6c1 100644
--- a/src/swing/scala/swing/RichWindow.scala
+++ b/src/swing/scala/swing/RichWindow.scala
@@ -53,7 +53,7 @@ sealed trait RichWindow extends Window {
* The menu bar of this frame or `NoMenuBar` if no menu bar is set.
*/
def menuBar: MenuBar = {
- val m = UIElement.cachedWrapper(peer.getJMenuBar)
+ val m = UIElement.cachedWrapper[MenuBar](peer.getJMenuBar)
if (m != null) m else MenuBar.NoMenuBar
}
/**
@@ -156,7 +156,7 @@ object Dialog {
initial: Int): Result.Value = {
val r = JOptionPane.showOptionDialog(nullPeer(parent), message, title,
optionType.id, messageType.id, Swing.wrapIcon(icon),
- entries.map(_.asInstanceOf[AnyRef]).toArray, entries(initial))
+ entries map toAnyRef toArray, entries(initial))
Result(r)
}
@@ -168,11 +168,12 @@ object Dialog {
entries: Seq[A] = Nil,
initial: A): Option[A] = {
val e = if (entries.isEmpty) null
- else entries.map(_.asInstanceOf[AnyRef]).toArray
+ else entries map toAnyRef toArray
val r = JOptionPane.showInputDialog(nullPeer(parent), message, title,
messageType.id, Swing.wrapIcon(icon),
e, initial)
- Swing.toOption(r)
+
+ toOption[A](r)
}
def showMessage(parent: Component = null,
message: Any,
diff --git a/src/swing/scala/swing/RootPanel.scala b/src/swing/scala/swing/RootPanel.scala
index 583e6a10a9..c18b68a306 100644
--- a/src/swing/scala/swing/RootPanel.scala
+++ b/src/swing/scala/swing/RootPanel.scala
@@ -22,11 +22,13 @@ trait RootPanel extends Container {
/**
* At most one component.
*/
- def contents: Seq[Component] = {
- Swing.toOption[Any](peer.getContentPane.getComponent(0)).map { c =>
- UIElement.cachedWrapper(c.asInstanceOf[javax.swing.JComponent])
- }.toList
- }
+ def contents: Seq[Component] =
+ if (peer.getContentPane.getComponentCount == 0) Nil
+ else {
+ val c = peer.getContentPane.getComponent(0).asInstanceOf[javax.swing.JComponent]
+ List(UIElement.cachedWrapper[Component](c))
+ }
+
def contents_=(c: Component) {
if (peer.getContentPane.getComponentCount > 0) {
val old = peer.getContentPane.getComponent(0)
diff --git a/src/swing/scala/swing/ScrollPane.scala b/src/swing/scala/swing/ScrollPane.scala
index c92ffe8211..fc2e96e67a 100644
--- a/src/swing/scala/swing/ScrollPane.scala
+++ b/src/swing/scala/swing/ScrollPane.scala
@@ -63,17 +63,17 @@ class ScrollPane extends Component with Container {
* want to let the row header be a list view with the same row height as
* the viewport component.
*/
- def rowHeaderView: Option[Component] = Swing.toOption(peer.getRowHeader.getView).map(UIElement.cachedWrapper(_))
+ def rowHeaderView: Option[Component] = Option(peer.getRowHeader.getView) map UIElement.cachedWrapper
def rowHeaderView_=(c: Component) = peer.setRowHeaderView(c.peer)
- def rowHeaderView_=(c: Option[Component]) = peer.setRowHeaderView(Swing.toNull(c.map(_.peer)))
+ def rowHeaderView_=(c: Option[Component]) = peer.setRowHeaderView(c map (_.peer) orNull)
- def columnHeaderView: Option[Component] = Swing.toOption(peer.getColumnHeader.getView).map(UIElement.cachedWrapper(_))
+ def columnHeaderView: Option[Component] = Option(peer.getColumnHeader.getView) map UIElement.cachedWrapper
def columnHeaderView_=(c: Component) = peer.setColumnHeaderView(c.peer)
- def columnHeaderView_=(c: Option[Component]) = peer.setColumnHeaderView(Swing.toNull(c.map(_.peer)))
+ def columnHeaderView_=(c: Option[Component]) = peer.setColumnHeaderView(c map (_.peer) orNull)
- def viewportView: Option[Component] = Swing.toOption(peer.getViewport.getView).map(UIElement.cachedWrapper(_))
+ def viewportView: Option[Component] = Option(peer.getViewport.getView) map UIElement.cachedWrapper
def viewportView_=(c: Component) = peer.setViewportView(c.peer)
- def viewportView_=(c: Option[Component]) = peer.setViewportView(Swing.toNull(c.map(_.peer)))
+ def viewportView_=(c: Option[Component]) = peer.setViewportView(c map (_.peer) orNull)
def verticalScrollBarPolicy = BarPolicy.wrap(peer.getVerticalScrollBarPolicy)
def verticalScrollBarPolicy_=(p: BarPolicy.Value) = peer.setVerticalScrollBarPolicy(p.verticalPeer)
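The header and viewport views now come in Option flavours that translate None to null via `orNull`. A short usage sketch:

    import scala.swing._

    val pane = new ScrollPane(new ListView(Seq("a", "b", "c")))
    pane.rowHeaderView = Some(new Label("rows")) // installs label.peer as the row header view
    pane.rowHeaderView = None                    // clears it again (orNull yields null)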
diff --git a/src/swing/scala/swing/SimpleGUIApplication.scala b/src/swing/scala/swing/SimpleGUIApplication.scala
index c09fdfb246..e26a0263f8 100644
--- a/src/swing/scala/swing/SimpleGUIApplication.scala
+++ b/src/swing/scala/swing/SimpleGUIApplication.scala
@@ -15,12 +15,12 @@ import javax.swing._
/**
* Extend this class for most simple UI applications. Clients need to implement the
- * <code>top</code> method. Framework intialization is done by this class.
+ * <code>top</code> method. Framework initialization is done by this class.
*
* In order to conform to Swing's threading policy, never implement top or any additional
* member that created Swing components as a value unless component creation happens on
* the EDT (see Swing.onEDT and Swing.onEDTWait). Lazy values are okay for the same reason
- * if they are intialized on the EDT always.
+ * if they are initialized on the EDT always.
*/
@deprecated("Use SimpleSwingApplication instead") abstract class SimpleGUIApplication extends GUIApplication {
@@ -44,5 +44,5 @@ import javax.swing._
this.getClass.getResource(path)
def resourceFromUserDirectory(path: String): java.io.File =
- new java.io.File(System.getProperty("user.dir"), path)
+ new java.io.File(util.Properties.userDir, path)
}
diff --git a/src/swing/scala/swing/SimpleSwingApplication.scala b/src/swing/scala/swing/SimpleSwingApplication.scala
index 9f66cc5be5..786c7b4711 100644
--- a/src/swing/scala/swing/SimpleSwingApplication.scala
+++ b/src/swing/scala/swing/SimpleSwingApplication.scala
@@ -5,7 +5,7 @@ abstract class SimpleSwingApplication extends SwingApplication {
override def startup(args: Array[String]) {
val t = top
- t.pack()
+ if (t.size == new Dimension(0,0)) t.pack()
t.visible = true
}
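`startup` now packs the frame only when its size is still the default (0,0), so a `top` that sets an explicit size keeps it. A minimal sketch of an application that relies on the default pack path:

    import scala.swing._

    object HelloApp extends SimpleSwingApplication {
      def top = new MainFrame {
        title = "Hello"
        contents = new Button("Press") // no explicit size set, so startup() packs the frame
      }
    }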
@@ -13,5 +13,5 @@ abstract class SimpleSwingApplication extends SwingApplication {
this.getClass.getResource(path)
def resourceFromUserDirectory(path: String): java.io.File =
- new java.io.File(System.getProperty("user.dir"), path)
+ new java.io.File(util.Properties.userDir, path)
}
diff --git a/src/swing/scala/swing/Slider.scala b/src/swing/scala/swing/Slider.scala
index 8a257acc8b..793f5eb5bb 100644
--- a/src/swing/scala/swing/Slider.scala
+++ b/src/swing/scala/swing/Slider.scala
@@ -52,10 +52,11 @@ class Slider extends Component with Orientable.Wrapper with Publisher {
def adjusting = peer.getValueIsAdjusting
- def labels: scala.collection.Map[Int, Label] =
- new scala.collection.JavaConversions.JMapWrapper[Int, JLabel](
- peer.getLabelTable.asInstanceOf[java.util.Hashtable[Int, JLabel]])
- .mapValues(v => (UIElement cachedWrapper v).asInstanceOf[Label])
+ def labels: scala.collection.Map[Int, Label] = {
+ val labelTable = peer.getLabelTable.asInstanceOf[java.util.Hashtable[Int, JLabel]]
+ new scala.collection.JavaConversions.JMapWrapper(labelTable)
+ .mapValues(v => UIElement.cachedWrapper[Label](v))
+ }
def labels_=(l: scala.collection.Map[Int, Label]) {
// TODO: do some lazy wrapping
val table = new java.util.Hashtable[Any, Any]
diff --git a/src/swing/scala/swing/Swing.scala b/src/swing/scala/swing/Swing.scala
index 3ed2f8b950..3ca36a38a1 100644
--- a/src/swing/scala/swing/Swing.scala
+++ b/src/swing/scala/swing/Swing.scala
@@ -20,9 +20,6 @@ import javax.swing.{JComponent, Icon, BorderFactory, SwingUtilities}
* Helpers for this package.
*/
object Swing {
- protected[swing] def ifNull[A](o: Object, a: A): A = if(o eq null) a else o.asInstanceOf[A]
- protected[swing] def toOption[A](o: Object): Option[A] = if(o eq null) None else Some(o.asInstanceOf[A])
- protected[swing] def toNull[A>:Null<:AnyRef](a: Option[A]): A = if(a == None) null else a.get
protected[swing] def toNoIcon(i: Icon): Icon = if(i == null) EmptyIcon else i
protected[swing] def toNullIcon(i: Icon): Icon = if(i == EmptyIcon) null else i
protected[swing] def nullPeer(c: Component) = if (c != null) c.peer else null
diff --git a/src/swing/scala/swing/Table.scala b/src/swing/scala/swing/Table.scala
index fb61a0bbbf..8030c5043a 100644
--- a/src/swing/scala/swing/Table.scala
+++ b/src/swing/scala/swing/Table.scala
@@ -15,7 +15,7 @@ import event._
import javax.swing._
import javax.swing.table._
import javax.swing.event._
-import scala.collection.mutable.{Set, IndexedSeq}
+import scala.collection.mutable
object Table {
object AutoResizeMode extends Enumeration {
@@ -173,7 +173,7 @@ class Table extends Component with Scrollable.Wrapper {
object selection extends Publisher {
// TODO: could be a sorted set
- protected abstract class SelectionSet[A](a: =>Seq[A]) extends scala.collection.mutable.Set[A] {
+ protected abstract class SelectionSet[A](a: =>Seq[A]) extends mutable.Set[A] {
def -=(n: A): this.type
def +=(n: A): this.type
def contains(n: A) = a.contains(n)
@@ -197,7 +197,7 @@ class Table extends Component with Scrollable.Wrapper {
def anchorIndex: Int = peer.getColumnModel.getSelectionModel.getAnchorSelectionIndex
}
- def cells: Set[(Int, Int)] =
+ def cells: mutable.Set[(Int, Int)] =
new SelectionSet[(Int, Int)]((for(r <- selection.rows; c <- selection.columns) yield (r,c)).toSeq) { outer =>
def -=(n: (Int, Int)) = {
peer.removeRowSelectionInterval(n._1,n._1)
diff --git a/src/swing/scala/swing/TextComponent.scala b/src/swing/scala/swing/TextComponent.scala
index f2ecd3d040..f249d57d0e 100644
--- a/src/swing/scala/swing/TextComponent.scala
+++ b/src/swing/scala/swing/TextComponent.scala
@@ -66,6 +66,7 @@ class TextComponent extends Component with Publisher {
def editable_=(x: Boolean) = peer.setEditable(x)
def cut() { peer.cut() }
def copy() { peer.copy() }
+ def paste() { peer.paste() }
def selected: String = peer.getSelectedText
def selectAll() { peer.selectAll() }
diff --git a/src/swing/scala/swing/TextField.scala b/src/swing/scala/swing/TextField.scala
index 540d4da0be..2f63da7f00 100644
--- a/src/swing/scala/swing/TextField.scala
+++ b/src/swing/scala/swing/TextField.scala
@@ -30,7 +30,7 @@ import java.awt.event._
*
* @see javax.swing.JTextField
*/
-class TextField(text0: String, columns0: Int) extends TextComponent with TextComponent.HasColumns {
+class TextField(text0: String, columns0: Int) extends TextComponent with TextComponent.HasColumns with Action.Trigger.Wrapper {
override lazy val peer: JTextField = new JTextField(text0, columns0) with SuperMixin
def this(text: String) = this(text, 0)
def this(columns: Int) = this("", columns)
@@ -48,7 +48,7 @@ class TextField(text0: String, columns0: Int) extends TextComponent with TextCom
publish(EditDone(TextField.this))
}
- override def onFirstSubscribe {
+ protected override def onFirstSubscribe {
super.onFirstSubscribe
peer.addActionListener(actionListener)
peer.addFocusListener(new FocusAdapter {
@@ -56,7 +56,7 @@ class TextField(text0: String, columns0: Int) extends TextComponent with TextCom
})
}
- override def onLastUnsubscribe {
+ protected override def onLastUnsubscribe {
super.onLastUnsubscribe
peer.removeActionListener(actionListener)
}
@@ -64,15 +64,16 @@ class TextField(text0: String, columns0: Int) extends TextComponent with TextCom
def verifier: String => Boolean = s => peer.getInputVerifier.verify(peer)
def verifier_=(v: String => Boolean) {
peer.setInputVerifier(new InputVerifier {
+ private val old = peer.getInputVerifier
def verify(c: JComponent) = v(text)
- override def shouldYieldFocus(c: JComponent) =
- peer.getInputVerifier.shouldYieldFocus(c)
+ override def shouldYieldFocus(c: JComponent) = old.shouldYieldFocus(c)
})
}
def shouldYieldFocus: String=>Boolean = s => peer.getInputVerifier.shouldYieldFocus(peer)
def shouldYieldFocus_=(y: String=>Boolean) {
peer.setInputVerifier(new InputVerifier {
- def verify(c: JComponent) = peer.getInputVerifier.verify(c)
+ private val old = peer.getInputVerifier
+ def verify(c: JComponent) = old.verify(c)
override def shouldYieldFocus(c: JComponent) = y(text)
})
}
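Capturing the previous peer verifier in `old` before `setInputVerifier` runs keeps the new verifier from reading back the instance it is itself part of. Client code remains a simple function from the field's text to Boolean; a sketch:

    import scala.swing._

    val amount = new TextField(10)
    amount.verifier = s => s.forall(_.isDigit) // `s` is the field's current text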
diff --git a/src/swing/scala/swing/UIElement.scala b/src/swing/scala/swing/UIElement.scala
index 8a661626f4..0d15e12979 100644
--- a/src/swing/scala/swing/UIElement.scala
+++ b/src/swing/scala/swing/UIElement.scala
@@ -31,6 +31,10 @@ object UIElement {
* Java Swing peer. If this method finds one of the given type `C`,
* it will return that wrapper. Otherwise it returns `null`. This
* method never throws an exception.
+ *
+ * Clients should be extremely careful with type parameter `C` and
+ * its interaction with type inference. Better err on the side of caution
+ * and explicitly specify `C`.
*/
private[swing] def cachedWrapper[C>:Null<:UIElement](c: java.awt.Component): C = {
val w = c match {
@@ -119,7 +123,7 @@ trait UIElement extends Proxy with LazyPublisher {
def ignoreRepaint: Boolean = peer.getIgnoreRepaint
def ignoreRepaint_=(b: Boolean) { peer.setIgnoreRepaint(b) }
- def onFirstSubscribe {
+ protected def onFirstSubscribe {
peer.addComponentListener(new java.awt.event.ComponentListener {
def componentHidden(e: java.awt.event.ComponentEvent) {
publish(UIElementHidden(UIElement.this))
@@ -135,5 +139,5 @@ trait UIElement extends Proxy with LazyPublisher {
}
})
}
- def onLastUnsubscribe {}
+ protected def onLastUnsubscribe {}
}
diff --git a/src/swing/scala/swing/Window.scala b/src/swing/scala/swing/Window.scala
index 2fba3acd7a..4e8f687a71 100644
--- a/src/swing/scala/swing/Window.scala
+++ b/src/swing/scala/swing/Window.scala
@@ -39,12 +39,12 @@ abstract class Window extends UIElement with RootPanel with Publisher { outer =>
peer.pack() // pack also validates, which is generally required after an add
}
def defaultButton: Option[Button] =
- Swing.toOption(peer.getRootPane.getDefaultButton).map(UIElement.cachedWrapper(_))
+ toOption(peer.getRootPane.getDefaultButton) map UIElement.cachedWrapper[Button]
def defaultButton_=(b: Button) {
peer.getRootPane.setDefaultButton(b.peer)
}
def defaultButton_=(b: Option[Button]) {
- peer.getRootPane.setDefaultButton(Swing.toNull(b.map(_.peer)))
+ peer.getRootPane.setDefaultButton(b map (_.peer) orNull)
}
def dispose() { peer.dispose() }
@@ -55,7 +55,7 @@ abstract class Window extends UIElement with RootPanel with Publisher { outer =>
def centerOnScreen() { peer.setLocationRelativeTo(null) }
def location_=(p: Point) { peer.setLocation(p) }
- def owner: Window = UIElement.cachedWrapper(peer.getOwner)
+ def owner: Window = UIElement.cachedWrapper[Window](peer.getOwner)
def open() { peer setVisible true }
def close() { peer setVisible false }
diff --git a/src/swing/scala/swing/event/TableEvent.scala b/src/swing/scala/swing/event/TableEvent.scala
index 9e5a05629b..e12322fa18 100644
--- a/src/swing/scala/swing/event/TableEvent.scala
+++ b/src/swing/scala/swing/event/TableEvent.scala
@@ -18,7 +18,7 @@ abstract class TableChange(override val source: Table) extends TableEvent(source
/**
* The most general table change. The table might have changed completely,
- * i.e., comlumns might have been reordered, rows added or removed, etc.
+ * i.e., columns might have been reordered, rows added or removed, etc.
* No other event indicates that the structure might have changed.
*/
case class TableStructureChanged(override val source: Table) extends TableChange(source)
diff --git a/src/swing/scala/swing/package.scala b/src/swing/scala/swing/package.scala
index 8f4c281a4b..deb291ddb2 100644
--- a/src/swing/scala/swing/package.scala
+++ b/src/swing/scala/swing/package.scala
@@ -13,78 +13,8 @@ package object swing {
type Color = java.awt.Color
type Image = java.awt.Image
type Font = java.awt.Font
-}
-
-object Font {
- import swing._
-
- def apply(fontFormat: Int, fontFile: java.io.File) = java.awt.Font.createFont(fontFormat, fontFile)
- def apply(fontFormat: Int, fontStream: java.io.InputStream) = java.awt.Font.createFont(fontFormat, fontStream)
- def decode(str: String) = java.awt.Font.decode(str)
-
- /* TODO: finish implementation
- /**
- * See [java.awt.Font.getFont].
- */
- def get(attributes: Map[_ <: java.text.AttributedCharacterIterator.Attribute, _]) =
- java.awt.Font.getFont(ImmutableMapWrapper(attributes))
-
- import java.{util => ju}
- private case class ImmutableMapWrapper[A, B](underlying : Map[A, B])(m : ClassManifest[A]) extends ju.AbstractMap[A, B] {
- self =>
- override def size = underlying.size
-
- override def put(k : A, v : B) =
- throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
- override def remove(k : AnyRef) =
- throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
-
- override def entrySet : ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
- def size = self.size
-
- def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
- val ui = underlying.iterator
- var prev : Option[A] = None
- def hasNext = ui.hasNext
-
- def next = {
- val (k, v) = ui.next
- prev = Some(k)
- new ju.Map.Entry[A, B] {
- def getKey = k
- def getValue = v
- def setValue(v1 : B) = self.put(k, v1)
- override def equals(other : Any) = other match {
- case e : ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
- case _ => false
- }
- }
- }
-
- def remove = prev match {
- case Some(k) => val v = self.remove(k.asInstanceOf[AnyRef]) ; prev = None ; v
- case _ => throw new IllegalStateException("next must be called at least once before remove")
- }
- }
- }
- }
- */
-
- /**
- * See [java.awt.Font.getFont].
- */
- def get(nm: String) = java.awt.Font.getFont(nm)
- /**
- * See [java.awt.Font.getFont].
- */
- def get(nm: String, font: Font) = java.awt.Font.getFont(nm, font)
-
-
- def Insets(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
- def Rectangle(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
- def Point(x: Int, y: Int) = new Point(x, y)
- def Dimension(x: Int, y: Int) = new Dimension(x, y)
-
-
-} \ No newline at end of file
+ protected[swing] def ifNull[A](o: Object, a: A): A = if(o eq null) a else o.asInstanceOf[A]
+ protected[swing] def toOption[A](o: Object): Option[A] = if(o eq null) None else Some(o.asInstanceOf[A])
+ protected[swing] def toAnyRef(x: Any): AnyRef = x.asInstanceOf[AnyRef]
+}
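These helpers stay protected[swing]; outside the package the same null handling is expressed with the standard library, as in this illustrative fragment (the environment variable name is hypothetical):

    val maybe: Option[String] = Option(System.getenv("UNSET_VAR")) // null-safe, like toOption
    val back: String = maybe orNull                                // back to a nullable reference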
diff --git a/src/swing/scala/swing/test/SimpleApplet.scala b/src/swing/scala/swing/test/SimpleApplet.scala
index 090f4cde8d..d5f17f8a40 100644
--- a/src/swing/scala/swing/test/SimpleApplet.scala
+++ b/src/swing/scala/swing/test/SimpleApplet.scala
@@ -7,8 +7,7 @@ class SimpleApplet extends Applet {
object ui extends UI with Reactor {
def init() = {
val button = new Button("Press here!")
- val text = new TextArea("Java Version: " +
- System.getProperty("java.version")+"\n")
+ val text = new TextArea("Java Version: " + util.Properties.javaVersion + "\n")
listenTo(button)
reactions += {
case ButtonClicked(_) => text.text += "Button Pressed!\n"
diff --git a/test/files/cli/test1/Main.check.j9vm5 b/test/attic/files/cli/test1/Main.check.j9vm5
index de454ef478..de454ef478 100644
--- a/test/files/cli/test1/Main.check.j9vm5
+++ b/test/attic/files/cli/test1/Main.check.j9vm5
diff --git a/test/files/cli/test1/Main.check.java b/test/attic/files/cli/test1/Main.check.java
index 64410de98f..64410de98f 100644
--- a/test/files/cli/test1/Main.check.java
+++ b/test/attic/files/cli/test1/Main.check.java
diff --git a/test/files/cli/test1/Main.check.java5 b/test/attic/files/cli/test1/Main.check.java5
index 64410de98f..64410de98f 100644
--- a/test/files/cli/test1/Main.check.java5
+++ b/test/attic/files/cli/test1/Main.check.java5
diff --git a/test/files/cli/test1/Main.check.java5_api b/test/attic/files/cli/test1/Main.check.java5_api
index 8693a5d92f..8693a5d92f 100644
--- a/test/files/cli/test1/Main.check.java5_api
+++ b/test/attic/files/cli/test1/Main.check.java5_api
diff --git a/test/files/cli/test1/Main.check.java5_j9 b/test/attic/files/cli/test1/Main.check.java5_j9
index de454ef478..de454ef478 100644
--- a/test/files/cli/test1/Main.check.java5_j9
+++ b/test/attic/files/cli/test1/Main.check.java5_j9
diff --git a/test/files/cli/test1/Main.check.javac b/test/attic/files/cli/test1/Main.check.javac
index ba25d9b6ca..ba25d9b6ca 100644
--- a/test/files/cli/test1/Main.check.javac
+++ b/test/attic/files/cli/test1/Main.check.javac
diff --git a/test/files/cli/test1/Main.check.javac5 b/test/attic/files/cli/test1/Main.check.javac5
index 0cb29d31ff..0cb29d31ff 100644
--- a/test/files/cli/test1/Main.check.javac5
+++ b/test/attic/files/cli/test1/Main.check.javac5
diff --git a/test/files/cli/test1/Main.check.javac6 b/test/attic/files/cli/test1/Main.check.javac6
index 8f37a05bcb..8f37a05bcb 100644
--- a/test/files/cli/test1/Main.check.javac6
+++ b/test/attic/files/cli/test1/Main.check.javac6
diff --git a/test/files/cli/test1/Main.check.jikes b/test/attic/files/cli/test1/Main.check.jikes
index cd891689db..cd891689db 100644
--- a/test/files/cli/test1/Main.check.jikes
+++ b/test/attic/files/cli/test1/Main.check.jikes
diff --git a/test/files/cli/test1/Main.check.jikes5 b/test/attic/files/cli/test1/Main.check.jikes5
index cd891689db..cd891689db 100644
--- a/test/files/cli/test1/Main.check.jikes5
+++ b/test/attic/files/cli/test1/Main.check.jikes5
diff --git a/test/files/cli/test1/Main.check.scala b/test/attic/files/cli/test1/Main.check.scala
index 43b200ae02..43b200ae02 100644
--- a/test/files/cli/test1/Main.check.scala
+++ b/test/attic/files/cli/test1/Main.check.scala
diff --git a/test/files/cli/test1/Main.check.scala_api b/test/attic/files/cli/test1/Main.check.scala_api
index 6fac39d3f0..6fac39d3f0 100644
--- a/test/files/cli/test1/Main.check.scala_api
+++ b/test/attic/files/cli/test1/Main.check.scala_api
diff --git a/test/files/cli/test1/Main.check.scala_j9 b/test/attic/files/cli/test1/Main.check.scala_j9
index 65d5ddaac4..65d5ddaac4 100644
--- a/test/files/cli/test1/Main.check.scala_j9
+++ b/test/attic/files/cli/test1/Main.check.scala_j9
diff --git a/test/files/cli/test1/Main.check.scalac b/test/attic/files/cli/test1/Main.check.scalac
index 8465810d0b..8465810d0b 100644
--- a/test/files/cli/test1/Main.check.scalac
+++ b/test/attic/files/cli/test1/Main.check.scalac
diff --git a/test/files/cli/test1/Main.check.scalaint b/test/attic/files/cli/test1/Main.check.scalaint
index 88345d1874..88345d1874 100644
--- a/test/files/cli/test1/Main.check.scalaint
+++ b/test/attic/files/cli/test1/Main.check.scalaint
diff --git a/test/files/cli/test1/Main.java b/test/attic/files/cli/test1/Main.java
index 8850b87517..8850b87517 100644
--- a/test/files/cli/test1/Main.java
+++ b/test/attic/files/cli/test1/Main.java
diff --git a/test/files/cli/test1/Main.scala b/test/attic/files/cli/test1/Main.scala
index f7dd8a0a36..f7dd8a0a36 100644
--- a/test/files/cli/test1/Main.scala
+++ b/test/attic/files/cli/test1/Main.scala
diff --git a/test/files/cli/test2/Main.check.j9vm5 b/test/attic/files/cli/test2/Main.check.j9vm5
index 8f4fdf8aa1..8f4fdf8aa1 100644
--- a/test/files/cli/test2/Main.check.j9vm5
+++ b/test/attic/files/cli/test2/Main.check.j9vm5
diff --git a/test/files/cli/test2/Main.check.java b/test/attic/files/cli/test2/Main.check.java
index aca383de3e..aca383de3e 100644
--- a/test/files/cli/test2/Main.check.java
+++ b/test/attic/files/cli/test2/Main.check.java
diff --git a/test/files/cli/test2/Main.check.java5 b/test/attic/files/cli/test2/Main.check.java5
index aca383de3e..aca383de3e 100644
--- a/test/files/cli/test2/Main.check.java5
+++ b/test/attic/files/cli/test2/Main.check.java5
diff --git a/test/files/cli/test2/Main.check.java5_api b/test/attic/files/cli/test2/Main.check.java5_api
index 4ff775c3da..4ff775c3da 100644
--- a/test/files/cli/test2/Main.check.java5_api
+++ b/test/attic/files/cli/test2/Main.check.java5_api
diff --git a/test/files/cli/test2/Main.check.java5_j9 b/test/attic/files/cli/test2/Main.check.java5_j9
index 2dcb6e892a..2dcb6e892a 100644
--- a/test/files/cli/test2/Main.check.java5_j9
+++ b/test/attic/files/cli/test2/Main.check.java5_j9
diff --git a/test/files/cli/test2/Main.check.javac b/test/attic/files/cli/test2/Main.check.javac
index c40c0a7a89..c40c0a7a89 100644
--- a/test/files/cli/test2/Main.check.javac
+++ b/test/attic/files/cli/test2/Main.check.javac
diff --git a/test/files/cli/test2/Main.check.javac5 b/test/attic/files/cli/test2/Main.check.javac5
index 0ac32b056e..0ac32b056e 100644
--- a/test/files/cli/test2/Main.check.javac5
+++ b/test/attic/files/cli/test2/Main.check.javac5
diff --git a/test/files/cli/test2/Main.check.javac6 b/test/attic/files/cli/test2/Main.check.javac6
index 350d3253bc..350d3253bc 100644
--- a/test/files/cli/test2/Main.check.javac6
+++ b/test/attic/files/cli/test2/Main.check.javac6
diff --git a/test/files/cli/test2/Main.check.jikes b/test/attic/files/cli/test2/Main.check.jikes
index 97943e8347..97943e8347 100644
--- a/test/files/cli/test2/Main.check.jikes
+++ b/test/attic/files/cli/test2/Main.check.jikes
diff --git a/test/files/cli/test2/Main.check.jikes5 b/test/attic/files/cli/test2/Main.check.jikes5
index 97943e8347..97943e8347 100644
--- a/test/files/cli/test2/Main.check.jikes5
+++ b/test/attic/files/cli/test2/Main.check.jikes5
diff --git a/test/files/cli/test2/Main.check.scala b/test/attic/files/cli/test2/Main.check.scala
index 7e5f17625b..7e5f17625b 100644
--- a/test/files/cli/test2/Main.check.scala
+++ b/test/attic/files/cli/test2/Main.check.scala
diff --git a/test/files/cli/test2/Main.check.scala_api b/test/attic/files/cli/test2/Main.check.scala_api
index bcb0f0c7fb..bcb0f0c7fb 100644
--- a/test/files/cli/test2/Main.check.scala_api
+++ b/test/attic/files/cli/test2/Main.check.scala_api
diff --git a/test/files/cli/test2/Main.check.scala_j9 b/test/attic/files/cli/test2/Main.check.scala_j9
index 80cbb50fa9..80cbb50fa9 100644
--- a/test/files/cli/test2/Main.check.scala_j9
+++ b/test/attic/files/cli/test2/Main.check.scala_j9
diff --git a/test/files/cli/test2/Main.check.scalac b/test/attic/files/cli/test2/Main.check.scalac
index 8465810d0b..8465810d0b 100644
--- a/test/files/cli/test2/Main.check.scalac
+++ b/test/attic/files/cli/test2/Main.check.scalac
diff --git a/test/files/cli/test2/Main.check.scalaint b/test/attic/files/cli/test2/Main.check.scalaint
index 89b6766bb5..89b6766bb5 100644
--- a/test/files/cli/test2/Main.check.scalaint
+++ b/test/attic/files/cli/test2/Main.check.scalaint
diff --git a/test/files/cli/test2/Main.java b/test/attic/files/cli/test2/Main.java
index f6797632bf..f6797632bf 100644
--- a/test/files/cli/test2/Main.java
+++ b/test/attic/files/cli/test2/Main.java
diff --git a/test/files/cli/test2/Main.scala b/test/attic/files/cli/test2/Main.scala
index 1d43759fdf..1d43759fdf 100644
--- a/test/files/cli/test2/Main.scala
+++ b/test/attic/files/cli/test2/Main.scala
diff --git a/test/files/cli/test3/Main.check.j9vm5 b/test/attic/files/cli/test3/Main.check.j9vm5
index a094dc8daf..a094dc8daf 100644
--- a/test/files/cli/test3/Main.check.j9vm5
+++ b/test/attic/files/cli/test3/Main.check.j9vm5
diff --git a/test/files/cli/test3/Main.check.java b/test/attic/files/cli/test3/Main.check.java
index de3eb7b136..de3eb7b136 100644
--- a/test/files/cli/test3/Main.check.java
+++ b/test/attic/files/cli/test3/Main.check.java
diff --git a/test/files/cli/test3/Main.check.java5 b/test/attic/files/cli/test3/Main.check.java5
index de3eb7b136..de3eb7b136 100644
--- a/test/files/cli/test3/Main.check.java5
+++ b/test/attic/files/cli/test3/Main.check.java5
diff --git a/test/files/cli/test3/Main.check.java5_api b/test/attic/files/cli/test3/Main.check.java5_api
index f6112211f0..f6112211f0 100644
--- a/test/files/cli/test3/Main.check.java5_api
+++ b/test/attic/files/cli/test3/Main.check.java5_api
diff --git a/test/files/cli/test3/Main.check.java5_j9 b/test/attic/files/cli/test3/Main.check.java5_j9
index 9e228d7649..9e228d7649 100644
--- a/test/files/cli/test3/Main.check.java5_j9
+++ b/test/attic/files/cli/test3/Main.check.java5_j9
diff --git a/test/files/cli/test3/Main.check.javac b/test/attic/files/cli/test3/Main.check.javac
index 8d235b647b..8d235b647b 100644
--- a/test/files/cli/test3/Main.check.javac
+++ b/test/attic/files/cli/test3/Main.check.javac
diff --git a/test/files/cli/test3/Main.check.javac5 b/test/attic/files/cli/test3/Main.check.javac5
index 3a48fa000e..3a48fa000e 100644
--- a/test/files/cli/test3/Main.check.javac5
+++ b/test/attic/files/cli/test3/Main.check.javac5
diff --git a/test/files/cli/test3/Main.check.javac6 b/test/attic/files/cli/test3/Main.check.javac6
index 677b950aed..677b950aed 100644
--- a/test/files/cli/test3/Main.check.javac6
+++ b/test/attic/files/cli/test3/Main.check.javac6
diff --git a/test/files/cli/test3/Main.check.jikes b/test/attic/files/cli/test3/Main.check.jikes
index 604333e81a..604333e81a 100644
--- a/test/files/cli/test3/Main.check.jikes
+++ b/test/attic/files/cli/test3/Main.check.jikes
diff --git a/test/files/cli/test3/Main.check.jikes5 b/test/attic/files/cli/test3/Main.check.jikes5
index 604333e81a..604333e81a 100644
--- a/test/files/cli/test3/Main.check.jikes5
+++ b/test/attic/files/cli/test3/Main.check.jikes5
diff --git a/test/files/cli/test3/Main.check.scala b/test/attic/files/cli/test3/Main.check.scala
index f78729b9a2..f78729b9a2 100644
--- a/test/files/cli/test3/Main.check.scala
+++ b/test/attic/files/cli/test3/Main.check.scala
diff --git a/test/files/cli/test3/Main.check.scala_api b/test/attic/files/cli/test3/Main.check.scala_api
index 4552819b5b..4552819b5b 100644
--- a/test/files/cli/test3/Main.check.scala_api
+++ b/test/attic/files/cli/test3/Main.check.scala_api
diff --git a/test/files/cli/test3/Main.check.scala_j9 b/test/attic/files/cli/test3/Main.check.scala_j9
index 3804c17636..3804c17636 100644
--- a/test/files/cli/test3/Main.check.scala_j9
+++ b/test/attic/files/cli/test3/Main.check.scala_j9
diff --git a/test/files/cli/test3/Main.check.scalac b/test/attic/files/cli/test3/Main.check.scalac
index 8465810d0b..8465810d0b 100644
--- a/test/files/cli/test3/Main.check.scalac
+++ b/test/attic/files/cli/test3/Main.check.scalac
diff --git a/test/files/cli/test3/Main.check.scalaint b/test/attic/files/cli/test3/Main.check.scalaint
index cffa02c5b6..cffa02c5b6 100644
--- a/test/files/cli/test3/Main.check.scalaint
+++ b/test/attic/files/cli/test3/Main.check.scalaint
diff --git a/test/files/cli/test3/Main.java b/test/attic/files/cli/test3/Main.java
index 208863d012..208863d012 100644
--- a/test/files/cli/test3/Main.java
+++ b/test/attic/files/cli/test3/Main.java
diff --git a/test/files/cli/test3/Main.scala b/test/attic/files/cli/test3/Main.scala
index 63fc11b771..63fc11b771 100644
--- a/test/files/cli/test3/Main.scala
+++ b/test/attic/files/cli/test3/Main.scala
diff --git a/test/continuations/neg/function0.check b/test/continuations/neg/function0.check
new file mode 100644
index 0000000000..0a66763a0f
--- /dev/null
+++ b/test/continuations/neg/function0.check
@@ -0,0 +1,6 @@
+function0.scala:11: error: type mismatch;
+ found : () => Int @scala.util.continuations.cpsParam[Int,Int]
+ required: () => Int
+ val g: () => Int = f
+ ^
+one error found
diff --git a/test/continuations/neg/function0.scala b/test/continuations/neg/function0.scala
new file mode 100644
index 0000000000..4112ee3835
--- /dev/null
+++ b/test/continuations/neg/function0.scala
@@ -0,0 +1,16 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+
+ val f = () => shift { k: (Int=>Int) => k(7) }
+ val g: () => Int = f
+
+ println(reset(g()))
+ }
+
+} \ No newline at end of file
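The mismatch in this neg test is deliberate: the inferred type of `f` carries the @cpsParam[Int,Int] annotation on its result, so it cannot be assigned to a plain () => Int. The accepted variant appears in test/continuations/run/function1.scala further below, where the target type keeps the annotation (() => Int @cps[Int]).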
diff --git a/test/continuations/neg/function2.check b/test/continuations/neg/function2.check
new file mode 100644
index 0000000000..4833057652
--- /dev/null
+++ b/test/continuations/neg/function2.check
@@ -0,0 +1,6 @@
+function2.scala:11: error: type mismatch;
+ found : () => Int
+ required: () => Int @util.continuations.package.cps[Int]
+ val g: () => Int @cps[Int] = f
+ ^
+one error found
diff --git a/test/continuations/neg/function2.scala b/test/continuations/neg/function2.scala
new file mode 100644
index 0000000000..ae0fda509d
--- /dev/null
+++ b/test/continuations/neg/function2.scala
@@ -0,0 +1,16 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+
+ val f = () => 7
+ val g: () => Int @cps[Int] = f
+
+ println(reset(g()))
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/neg/function3.check b/test/continuations/neg/function3.check
new file mode 100644
index 0000000000..4705ad9ed9
--- /dev/null
+++ b/test/continuations/neg/function3.check
@@ -0,0 +1,6 @@
+function3.scala:10: error: type mismatch;
+ found : Int @scala.util.continuations.cpsParam[Int,Int]
+ required: Int
+ val g: () => Int = () => shift { k: (Int=>Int) => k(7) }
+ ^
+one error found
diff --git a/test/continuations/neg/function3.scala b/test/continuations/neg/function3.scala
new file mode 100644
index 0000000000..0c3f1667e5
--- /dev/null
+++ b/test/continuations/neg/function3.scala
@@ -0,0 +1,15 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+
+ val g: () => Int = () => shift { k: (Int=>Int) => k(7) }
+
+ println(reset(g()))
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/neg/infer0.check b/test/continuations/neg/infer0.check
new file mode 100644
index 0000000000..1dd072ef09
--- /dev/null
+++ b/test/continuations/neg/infer0.check
@@ -0,0 +1,4 @@
+infer0.scala:11: error: cannot cps-transform expression 8: type arguments [Int(8),String,Int] do not conform to method shiftUnit's type parameter bounds [A,B,C >: B]
+ test(8)
+ ^
+one error found
diff --git a/test/continuations/neg/infer0.scala b/test/continuations/neg/infer0.scala
new file mode 100644
index 0000000000..894d5228b1
--- /dev/null
+++ b/test/continuations/neg/infer0.scala
@@ -0,0 +1,14 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def test(x: => Int @cpsParam[String,Int]) = 7
+
+ def main(args: Array[String]): Any = {
+ test(8)
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/neg/infer2.check b/test/continuations/neg/infer2.check
new file mode 100644
index 0000000000..59eb670bc3
--- /dev/null
+++ b/test/continuations/neg/infer2.check
@@ -0,0 +1,4 @@
+infer2.scala:14: error: illegal answer type modification: scala.util.continuations.cpsParam[String,Int] andThen scala.util.continuations.cpsParam[String,Int]
+ test { sym(); sym() }
+ ^
+one error found
diff --git a/test/continuations/neg/infer2.scala b/test/continuations/neg/infer2.scala
new file mode 100644
index 0000000000..a890ac1fc4
--- /dev/null
+++ b/test/continuations/neg/infer2.scala
@@ -0,0 +1,19 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def test(x: => Int @cpsParam[String,Int]) = 7
+
+ def sym() = shift { k: (Int => String) => 9 }
+
+
+ def main(args: Array[String]): Any = {
+ test { sym(); sym() }
+ }
+
+}
+
+
diff --git a/test/continuations/neg/lazy.check b/test/continuations/neg/lazy.check
new file mode 100644
index 0000000000..bfa44c59a4
--- /dev/null
+++ b/test/continuations/neg/lazy.check
@@ -0,0 +1,6 @@
+lazy.scala:5: error: type mismatch;
+ found : Unit @scala.util.continuations.cpsParam[Unit,Unit]
+ required: Unit
+ def foo = {
+ ^
+one error found
diff --git a/test/continuations/neg/lazy.scala b/test/continuations/neg/lazy.scala
new file mode 100644
index 0000000000..dffc57ffa0
--- /dev/null
+++ b/test/continuations/neg/lazy.scala
@@ -0,0 +1,16 @@
+import scala.util.continuations._
+
+object Test {
+
+ def foo = {
+ lazy val x = shift((k:Unit=>Unit)=>k())
+ println(x)
+ }
+
+ def main(args: Array[String]) {
+ reset {
+ foo
+ }
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/neg/t1929.check b/test/continuations/neg/t1929.check
new file mode 100644
index 0000000000..f42c3a1e15
--- /dev/null
+++ b/test/continuations/neg/t1929.check
@@ -0,0 +1,6 @@
+t1929.scala:8: error: type mismatch;
+ found : Int @scala.util.continuations.cpsParam[String,java.lang.String] @scala.util.continuations.cpsSynth
+ required: Int @scala.util.continuations.cpsParam[Int,java.lang.String]
+ reset {
+ ^
+one error found
diff --git a/test/continuations/neg/t1929.scala b/test/continuations/neg/t1929.scala
new file mode 100644
index 0000000000..02eda9170d
--- /dev/null
+++ b/test/continuations/neg/t1929.scala
@@ -0,0 +1,17 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+ def main(args : Array[String]) {
+ reset {
+ println("up")
+ val x = shift((k:Int=>String) => k(8) + k(2))
+ println("down " + x)
+ val y = shift((k:Int=>String) => k(3))
+ println("down2 " + y)
+ y + x
+ }
+ }
+} \ No newline at end of file
diff --git a/test/continuations/neg/t2285.check b/test/continuations/neg/t2285.check
new file mode 100644
index 0000000000..d5dff6a4f2
--- /dev/null
+++ b/test/continuations/neg/t2285.check
@@ -0,0 +1,6 @@
+t2285.scala:9: error: type mismatch;
+ found : Int @scala.util.continuations.cpsParam[String,String] @scala.util.continuations.cpsSynth
+ required: Int @scala.util.continuations.cpsParam[Int,String]
+ def foo() = reset { bar(); 7 }
+ ^
+one error found
diff --git a/test/continuations/neg/t2285.scala b/test/continuations/neg/t2285.scala
new file mode 100644
index 0000000000..f3c7f4c89c
--- /dev/null
+++ b/test/continuations/neg/t2285.scala
@@ -0,0 +1,11 @@
+// $Id$
+
+import scala.util.continuations._
+
+object Test {
+
+ def bar() = shift { k: (String => String) => k("1") }
+
+ def foo() = reset { bar(); 7 }
+
+}
diff --git a/test/continuations/neg/t2949.check b/test/continuations/neg/t2949.check
new file mode 100644
index 0000000000..dd9768807c
--- /dev/null
+++ b/test/continuations/neg/t2949.check
@@ -0,0 +1,6 @@
+t2949.scala:13: error: type mismatch;
+ found : Int
+ required: ? @scala.util.continuations.cpsParam[List[?],Any]
+ x * y
+ ^
+one error found
diff --git a/test/continuations/neg/t2949.scala b/test/continuations/neg/t2949.scala
new file mode 100644
index 0000000000..ce27c7c0e8
--- /dev/null
+++ b/test/continuations/neg/t2949.scala
@@ -0,0 +1,15 @@
+// $Id$
+
+import scala.util.continuations._
+
+object Test {
+
+ def reflect[A,B](xs : List[A]) = shift{ xs.flatMap[B, List[B]] }
+ def reify[A, B](x : A @cpsParam[List[A], B]) = reset{ List(x) }
+
+ def main(args: Array[String]): Unit = println(reify {
+ val x = reflect[Int, Int](List(1,2,3))
+ val y = reflect[Int, Int](List(2,4,8))
+ x * y
+ })
+}
diff --git a/test/continuations/neg/trycatch2.check b/test/continuations/neg/trycatch2.check
new file mode 100644
index 0000000000..5ff2838bad
--- /dev/null
+++ b/test/continuations/neg/trycatch2.check
@@ -0,0 +1,7 @@
+trycatch2.scala:11: error: only simple cps types allowed in try/catch blocks (found: Int @scala.util.continuations.cpsParam[String,Int])
+ def foo1 = try {
+ ^
+trycatch2.scala:19: error: only simple cps types allowed in try/catch blocks (found: Int @scala.util.continuations.cpsParam[String,Int])
+ def foo2 = try {
+ ^
+two errors found
diff --git a/test/continuations/neg/trycatch2.scala b/test/continuations/neg/trycatch2.scala
new file mode 100644
index 0000000000..761cee52ac
--- /dev/null
+++ b/test/continuations/neg/trycatch2.scala
@@ -0,0 +1,33 @@
+// $Id$
+
+import scala.util.continuations._
+
+object Test {
+
+ def fatal[T]: T = throw new Exception
+ def cpsIntStringInt = shift { k:(Int=>String) => k(3); 7 }
+ def cpsIntIntString = shift { k:(Int=>Int) => k(3); "7" }
+
+ def foo1 = try {
+ fatal[Int]
+ cpsIntStringInt
+ } catch {
+ case ex =>
+ cpsIntStringInt
+ }
+
+ def foo2 = try {
+ fatal[Int]
+ cpsIntStringInt
+ } catch {
+ case ex =>
+ cpsIntStringInt
+ }
+
+
+ def main(args: Array[String]): Unit = {
+ println(reset { foo1; "3" })
+ println(reset { foo2; "3" })
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/run/basics.check b/test/continuations/run/basics.check
new file mode 100755
index 0000000000..54c059fdcb
--- /dev/null
+++ b/test/continuations/run/basics.check
@@ -0,0 +1,2 @@
+28
+28 \ No newline at end of file
diff --git a/test/continuations/run/basics.scala b/test/continuations/run/basics.scala
new file mode 100755
index 0000000000..9df209b11c
--- /dev/null
+++ b/test/continuations/run/basics.scala
@@ -0,0 +1,23 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def m0() = {
+ shift((k:Int => Int) => k(k(7))) * 2
+ }
+
+ def m1() = {
+ 2 * shift((k:Int => Int) => k(k(7)))
+ }
+
+ def main(args: Array[String]) = {
+
+ println(reset(m0()))
+ println(reset(m1()))
+
+ }
+
+}
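A worked reading of the expected output ("28" twice, per basics.check): inside reset, shift captures the rest of the enclosing expression as k. In m0 that continuation is x => x * 2, so the shift body computes k(k(7)) = k(14) = 28; in m1 it is x => 2 * x, which yields the same value.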
diff --git a/test/continuations/run/function1.check b/test/continuations/run/function1.check
new file mode 100644
index 0000000000..7f8f011eb7
--- /dev/null
+++ b/test/continuations/run/function1.check
@@ -0,0 +1 @@
+7
diff --git a/test/continuations/run/function1.scala b/test/continuations/run/function1.scala
new file mode 100644
index 0000000000..3b39722e3a
--- /dev/null
+++ b/test/continuations/run/function1.scala
@@ -0,0 +1,16 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+
+ val f = () => shift { k: (Int=>Int) => k(7) }
+ val g: () => Int @cps[Int] = f
+
+ println(reset(g()))
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/run/function4.check b/test/continuations/run/function4.check
new file mode 100644
index 0000000000..c7930257df
--- /dev/null
+++ b/test/continuations/run/function4.check
@@ -0,0 +1 @@
+7 \ No newline at end of file
diff --git a/test/continuations/run/function4.scala b/test/continuations/run/function4.scala
new file mode 100644
index 0000000000..b73eedb02c
--- /dev/null
+++ b/test/continuations/run/function4.scala
@@ -0,0 +1,15 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+
+ val g: () => Int @cps[Int] = () => shift { k: (Int=>Int) => k(7) }
+
+ println(reset(g()))
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/run/function5.check b/test/continuations/run/function5.check
new file mode 100644
index 0000000000..c7930257df
--- /dev/null
+++ b/test/continuations/run/function5.check
@@ -0,0 +1 @@
+7 \ No newline at end of file
diff --git a/test/continuations/run/function5.scala b/test/continuations/run/function5.scala
new file mode 100644
index 0000000000..a689ccf243
--- /dev/null
+++ b/test/continuations/run/function5.scala
@@ -0,0 +1,15 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+
+ val g: () => Int @cps[Int] = () => 7
+
+ println(reset(g()))
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/run/function6.check b/test/continuations/run/function6.check
new file mode 100644
index 0000000000..c7930257df
--- /dev/null
+++ b/test/continuations/run/function6.check
@@ -0,0 +1 @@
+7 \ No newline at end of file
diff --git a/test/continuations/run/function6.scala b/test/continuations/run/function6.scala
new file mode 100644
index 0000000000..1a2792370a
--- /dev/null
+++ b/test/continuations/run/function6.scala
@@ -0,0 +1,16 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+
+ val g: PartialFunction[Int, Int @cps[Int]] = { case x => 7 }
+
+ println(reset(g(2)))
+
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/run/ifelse0.check b/test/continuations/run/ifelse0.check
new file mode 100644
index 0000000000..f8bc79860d
--- /dev/null
+++ b/test/continuations/run/ifelse0.check
@@ -0,0 +1,2 @@
+10
+9 \ No newline at end of file
diff --git a/test/continuations/run/ifelse0.scala b/test/continuations/run/ifelse0.scala
new file mode 100644
index 0000000000..e34b86ee84
--- /dev/null
+++ b/test/continuations/run/ifelse0.scala
@@ -0,0 +1,18 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def test(x:Int) = if (x <= 7)
+ shift { k: (Int=>Int) => k(k(k(x))) }
+ else
+ shift { k: (Int=>Int) => k(x) }
+
+ def main(args: Array[String]): Any = {
+ println(reset(1 + test(7)))
+ println(reset(1 + test(8)))
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/run/ifelse1.check b/test/continuations/run/ifelse1.check
new file mode 100644
index 0000000000..86a3fbc0c1
--- /dev/null
+++ b/test/continuations/run/ifelse1.check
@@ -0,0 +1,4 @@
+10
+9
+8
+11 \ No newline at end of file
diff --git a/test/continuations/run/ifelse1.scala b/test/continuations/run/ifelse1.scala
new file mode 100644
index 0000000000..2ccc1ed730
--- /dev/null
+++ b/test/continuations/run/ifelse1.scala
@@ -0,0 +1,25 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def test1(x:Int) = if (x <= 7)
+ shift { k: (Int=>Int) => k(k(k(x))) }
+ else
+ x
+
+ def test2(x:Int) = if (x <= 7)
+ x
+ else
+ shift { k: (Int=>Int) => k(k(k(x))) }
+
+ def main(args: Array[String]): Any = {
+ println(reset(1 + test1(7)))
+ println(reset(1 + test1(8)))
+ println(reset(1 + test2(7)))
+ println(reset(1 + test2(8)))
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/run/ifelse2.check b/test/continuations/run/ifelse2.check
new file mode 100644
index 0000000000..f97a95b08d
--- /dev/null
+++ b/test/continuations/run/ifelse2.check
@@ -0,0 +1,4 @@
+abort
+()
+alive
+()
diff --git a/test/continuations/run/ifelse2.scala b/test/continuations/run/ifelse2.scala
new file mode 100644
index 0000000000..536e350190
--- /dev/null
+++ b/test/continuations/run/ifelse2.scala
@@ -0,0 +1,16 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def test(x:Int) = if (x <= 7)
+ shift { k: (Unit=>Unit) => println("abort") }
+
+ def main(args: Array[String]): Any = {
+ println(reset{ test(7); println("alive") })
+ println(reset{ test(8); println("alive") })
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/run/ifelse3.check b/test/continuations/run/ifelse3.check
new file mode 100644
index 0000000000..95b562c8e6
--- /dev/null
+++ b/test/continuations/run/ifelse3.check
@@ -0,0 +1,2 @@
+6
+9
diff --git a/test/continuations/run/ifelse3.scala b/test/continuations/run/ifelse3.scala
new file mode 100644
index 0000000000..5dbd079d1c
--- /dev/null
+++ b/test/continuations/run/ifelse3.scala
@@ -0,0 +1,21 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def util(x: Boolean) = shift { k: (Boolean=>Int) => k(x) }
+
+ def test(x:Int) = if (util(x <= 7))
+ x - 1
+ else
+ x + 1
+
+
+ def main(args: Array[String]): Any = {
+ println(reset(test(7)))
+ println(reset(test(8)))
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/run/infer1.scala b/test/continuations/run/infer1.scala
new file mode 100644
index 0000000000..a6c6c07215
--- /dev/null
+++ b/test/continuations/run/infer1.scala
@@ -0,0 +1,33 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def test(x: => Int @cpsParam[String,Int]) = 7
+
+ def test2() = {
+ val x = shift { k: (Int => String) => 9 }
+ x
+ }
+
+ def test3(x: => Int @cpsParam[Int,Int]) = 7
+
+
+ def util() = shift { k: (String => String) => "7" }
+
+ def main(args: Array[String]): Any = {
+ test { shift { k: (Int => String) => 9 } }
+ test { shift { k: (Int => String) => 9 }; 2 }
+// test { shift { k: (Int => String) => 9 }; util() } <-- doesn't work
+ test { shift { k: (Int => String) => 9 }; util(); 2 }
+
+
+ test { shift { k: (Int => String) => 9 }; { test3(0); 2 } }
+
+ test3 { { test3(0); 2 } }
+
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/run/match0.check b/test/continuations/run/match0.check
new file mode 100644
index 0000000000..f8bc79860d
--- /dev/null
+++ b/test/continuations/run/match0.check
@@ -0,0 +1,2 @@
+10
+9 \ No newline at end of file
diff --git a/test/continuations/run/match0.scala b/test/continuations/run/match0.scala
new file mode 100644
index 0000000000..bd36238d7f
--- /dev/null
+++ b/test/continuations/run/match0.scala
@@ -0,0 +1,18 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def test(x:Int) = x match {
+ case 7 => shift { k: (Int=>Int) => k(k(k(x))) }
+ case 8 => shift { k: (Int=>Int) => k(x) }
+ }
+
+ def main(args: Array[String]): Any = {
+ println(reset(1 + test(7)))
+ println(reset(1 + test(8)))
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/run/match1.check b/test/continuations/run/match1.check
new file mode 100644
index 0000000000..73053d3f4f
--- /dev/null
+++ b/test/continuations/run/match1.check
@@ -0,0 +1,2 @@
+10
+9
diff --git a/test/continuations/run/match1.scala b/test/continuations/run/match1.scala
new file mode 100644
index 0000000000..ea4e219666
--- /dev/null
+++ b/test/continuations/run/match1.scala
@@ -0,0 +1,18 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def test(x:Int) = x match {
+ case 7 => shift { k: (Int=>Int) => k(k(k(x))) }
+ case _ => x
+ }
+
+ def main(args: Array[String]): Any = {
+ println(reset(1 + test(7)))
+ println(reset(1 + test(8)))
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/run/match2.check b/test/continuations/run/match2.check
new file mode 100644
index 0000000000..cbf91349cc
--- /dev/null
+++ b/test/continuations/run/match2.check
@@ -0,0 +1,2 @@
+B
+B
diff --git a/test/continuations/run/match2.scala b/test/continuations/run/match2.scala
new file mode 100644
index 0000000000..8d4f04870f
--- /dev/null
+++ b/test/continuations/run/match2.scala
@@ -0,0 +1,26 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def test1() = {
+ val (a, b) = shift { k: (((String,String)) => String) => k("A","B") }
+ b
+ }
+
+ case class Elem[T,U](a: T, b: U)
+
+ def test2() = {
+ val Elem(a,b) = shift { k: (Elem[String,String] => String) => k(Elem("A","B")) }
+ b
+ }
+
+
+ def main(args: Array[String]): Any = {
+ println(reset(test1()))
+ println(reset(test2()))
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/run/t1807.check b/test/continuations/run/t1807.check
new file mode 100644
index 0000000000..56a6051ca2
--- /dev/null
+++ b/test/continuations/run/t1807.check
@@ -0,0 +1 @@
+1 \ No newline at end of file
diff --git a/test/continuations/run/t1807.scala b/test/continuations/run/t1807.scala
new file mode 100644
index 0000000000..278b3a9936
--- /dev/null
+++ b/test/continuations/run/t1807.scala
@@ -0,0 +1,14 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val z = reset {
+ val f: (() => Int @cps[Int]) = () => 1
+ f()
+ }
+ println(z)
+ }
+} \ No newline at end of file
diff --git a/test/continuations/run/t1808.scala b/test/continuations/run/t1808.scala
new file mode 100644
index 0000000000..125c7c1cdf
--- /dev/null
+++ b/test/continuations/run/t1808.scala
@@ -0,0 +1,10 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ reset0 { 0 }
+ }
+} \ No newline at end of file
diff --git a/test/continuations/run/t1820.scala b/test/continuations/run/t1820.scala
new file mode 100644
index 0000000000..893ddab6d1
--- /dev/null
+++ b/test/continuations/run/t1820.scala
@@ -0,0 +1,14 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+ def shifted: Unit @suspendable = shift { (k: Unit => Unit) => () }
+ def test1(b: => Boolean) = {
+ reset {
+ if (b) shifted
+ }
+ }
+ def main(args: Array[String]) = test1(true)
+} \ No newline at end of file
diff --git a/test/continuations/run/t1821.check b/test/continuations/run/t1821.check
new file mode 100644
index 0000000000..f7b76115db
--- /dev/null
+++ b/test/continuations/run/t1821.check
@@ -0,0 +1,4 @@
+()
+()
+()
+() \ No newline at end of file
diff --git a/test/continuations/run/t1821.scala b/test/continuations/run/t1821.scala
new file mode 100644
index 0000000000..0d5fb553be
--- /dev/null
+++ b/test/continuations/run/t1821.scala
@@ -0,0 +1,20 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+ def suspended[A](x: A): A @suspendable = x
+ def test1[A](x: A): A @suspendable = suspended(x) match { case x => x }
+ def test2[A](x: List[A]): A @suspendable = suspended(x) match { case List(x) => x }
+
+ def test3[A](x: A): A @suspendable = x match { case x => x }
+ def test4[A](x: List[A]): A @suspendable = x match { case List(x) => x }
+
+ def main(args: Array[String]) = {
+ println(reset(test1()))
+ println(reset(test2(List(()))))
+ println(reset(test3()))
+ println(reset(test4(List(()))))
+ }
+} \ No newline at end of file
diff --git a/test/continuations/run/t2864.check b/test/continuations/run/t2864.check
new file mode 100644
index 0000000000..d411bb7c1a
--- /dev/null
+++ b/test/continuations/run/t2864.check
@@ -0,0 +1 @@
+400
diff --git a/test/continuations/run/t2864.scala b/test/continuations/run/t2864.scala
new file mode 100644
index 0000000000..7a2579e45c
--- /dev/null
+++ b/test/continuations/run/t2864.scala
@@ -0,0 +1,30 @@
+import scala.util.continuations._
+object Test {
+
+ def double[B](n : Int)(k : Int => B) : B = k(n * 2)
+
+ def main(args : Array[String]) {
+ reset {
+ val result1 = shift(double[Unit](100))
+ val result2 = shift(double[Unit](result1))
+ println(result2)
+ }
+ }
+
+ def foo: Int @cps[Int] = {
+ val a0 = shift((k:Int=>Int) => k(0))
+ val x0 = 2
+ val a1 = shift((k:Int=>Int) => x0)
+ 0
+ }
+
+/*
+ def bar: ControlContext[Int,Int,Int] = {
+ shiftR((k:Int=>Int) => k(0)).flatMap { a0 =>
+ val x0 = 2
+ shiftR((k:Int=>Int) => x0).map { a1 =>
+ 0
+ }}
+ }
+*/
+} \ No newline at end of file
diff --git a/test/continuations/run/t2934.check b/test/continuations/run/t2934.check
new file mode 100644
index 0000000000..a92586538e
--- /dev/null
+++ b/test/continuations/run/t2934.check
@@ -0,0 +1 @@
+List(3, 4, 5)
diff --git a/test/continuations/run/t2934.scala b/test/continuations/run/t2934.scala
new file mode 100644
index 0000000000..a1b8ca9e04
--- /dev/null
+++ b/test/continuations/run/t2934.scala
@@ -0,0 +1,10 @@
+import scala.util.continuations._
+
+object Test {
+ def main(args : Array[String]) {
+ println(reset {
+ val x = shift(List(1,2,3).flatMap[Int, List[Int]])
+ List(x + 2)
+ })
+ }
+}
diff --git a/test/continuations/run/t3199.check b/test/continuations/run/t3199.check
new file mode 100644
index 0000000000..a065247b8c
--- /dev/null
+++ b/test/continuations/run/t3199.check
@@ -0,0 +1 @@
+Right(7)
diff --git a/test/continuations/run/t3199.scala b/test/continuations/run/t3199.scala
new file mode 100644
index 0000000000..3fd2f1959a
--- /dev/null
+++ b/test/continuations/run/t3199.scala
@@ -0,0 +1,20 @@
+import _root_.scala.collection.Seq
+import _root_.scala.util.control.Exception
+import _root_.scala.util.continuations._
+
+object Test {
+
+ trait AbstractResource[+R <: AnyRef] {
+ def reflect[B] : R @cpsParam[B,Either[Throwable, B]] = shift(acquireFor)
+ def acquireFor[B](f : R => B) : Either[Throwable, B] = {
+ import Exception._
+ catching(List(classOf[Throwable]) : _*) either (f(null.asInstanceOf[R]))
+ }
+ }
+
+ def main(args: Array[String]) : Unit = {
+ val x = new AbstractResource[String] { }
+ val result = x.acquireFor( x => 7 )
+ println(result)
+ }
+ }
diff --git a/test/continuations/run/t3199b.check b/test/continuations/run/t3199b.check
new file mode 100644
index 0000000000..b5d8bb58d9
--- /dev/null
+++ b/test/continuations/run/t3199b.check
@@ -0,0 +1 @@
+[1, 2, 3]
diff --git a/test/continuations/run/t3199b.scala b/test/continuations/run/t3199b.scala
new file mode 100644
index 0000000000..950c584153
--- /dev/null
+++ b/test/continuations/run/t3199b.scala
@@ -0,0 +1,11 @@
+object Test {
+
+ def test() = {
+ java.util.Arrays.asList(Array(1,2,3):_*)
+ }
+
+ def main(args: Array[String]) = {
+ println(test())
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/run/t3223.check b/test/continuations/run/t3223.check
new file mode 100644
index 0000000000..ec635144f6
--- /dev/null
+++ b/test/continuations/run/t3223.check
@@ -0,0 +1 @@
+9
diff --git a/test/continuations/run/t3223.scala b/test/continuations/run/t3223.scala
new file mode 100644
index 0000000000..4e510178e6
--- /dev/null
+++ b/test/continuations/run/t3223.scala
@@ -0,0 +1,19 @@
+import scala.util.continuations._
+object Test {
+
+ def foo(x:Int) = {
+ try {
+ throw new Exception
+ shiftUnit0[Int,Int](7)
+ } catch {
+ case ex =>
+ val g = (a:Int)=>a
+ 9
+ }
+ }
+
+ def main(args: Array[String]) {
+ println(reset(foo(0)))
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/run/t3225.check b/test/continuations/run/t3225.check
new file mode 100644
index 0000000000..df1a8a9ce4
--- /dev/null
+++ b/test/continuations/run/t3225.check
@@ -0,0 +1,12 @@
+8
+8
+9
+9
+8
+9
+8
+8
+9
+9
+8
+9
diff --git a/test/continuations/run/t3225.scala b/test/continuations/run/t3225.scala
new file mode 100644
index 0000000000..ecfde279cf
--- /dev/null
+++ b/test/continuations/run/t3225.scala
@@ -0,0 +1,56 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ class Bla {
+ val x = 8
+ def y[T] = 9
+ }
+
+/*
+ def bla[A] = shift { k:(Bla=>A) => k(new Bla) }
+*/
+
+ def bla1 = shift { k:(Bla=>Bla) => k(new Bla) }
+ def bla2 = shift { k:(Bla=>Int) => k(new Bla) }
+
+ def fooA = bla2.x
+ def fooB[T] = bla2.y[T]
+
+ def testMono() = {
+ println(reset(bla1).x)
+ println(reset(bla2.x))
+ println(reset(bla2.y[Int]))
+ println(reset(bla2.y))
+ println(reset(fooA))
+ println(reset(fooB))
+ 0
+ }
+
+ def blaX[A] = shift { k:(Bla=>A) => k(new Bla) }
+
+ def fooX[A] = blaX[A].x
+ def fooY[A] = blaX[A].y[A]
+
+ def testPoly() = {
+ println(reset(blaX[Bla]).x)
+ println(reset(blaX[Int].x))
+ println(reset(blaX[Int].y[Int]))
+ println(reset(blaX[Int].y))
+ println(reset(fooX[Int]))
+ println(reset(fooY[Int]))
+ 0
+ }
+
+
+ // TODO: check whether this also applies to a::shift { k => ... }
+
+ def main(args: Array[String]) = {
+ testMono()
+ testPoly()
+ }
+
+}
diff --git a/test/continuations/run/trycatch0.check b/test/continuations/run/trycatch0.check
new file mode 100644
index 0000000000..36806909d0
--- /dev/null
+++ b/test/continuations/run/trycatch0.check
@@ -0,0 +1,2 @@
+10
+10 \ No newline at end of file
diff --git a/test/continuations/run/trycatch0.scala b/test/continuations/run/trycatch0.scala
new file mode 100644
index 0000000000..74a078b5ef
--- /dev/null
+++ b/test/continuations/run/trycatch0.scala
@@ -0,0 +1,25 @@
+// $Id$
+
+import scala.util.continuations._
+
+object Test {
+
+ def foo = try {
+ shift((k: Int=>Int) => k(7))
+ } catch {
+ case ex =>
+ 9
+ }
+
+ def bar = try {
+ 7
+ } catch {
+ case ex =>
+ shiftUnit0[Int,Int](9)
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(reset { foo + 3 })
+ println(reset { bar + 3 })
+ }
+} \ No newline at end of file
diff --git a/test/continuations/run/trycatch1.check b/test/continuations/run/trycatch1.check
new file mode 100644
index 0000000000..a028d2b1e1
--- /dev/null
+++ b/test/continuations/run/trycatch1.check
@@ -0,0 +1,4 @@
+12
+12
+12
+12 \ No newline at end of file
diff --git a/test/continuations/run/trycatch1.scala b/test/continuations/run/trycatch1.scala
new file mode 100644
index 0000000000..ade13794e3
--- /dev/null
+++ b/test/continuations/run/trycatch1.scala
@@ -0,0 +1,48 @@
+// $Id$
+
+import scala.util.continuations._
+
+object Test {
+
+ def fatal: Int = throw new Exception()
+
+ def foo1 = try {
+ fatal
+ shift((k: Int=>Int) => k(7))
+ } catch {
+ case ex =>
+ 9
+ }
+
+ def foo2 = try {
+ shift((k: Int=>Int) => k(7))
+ fatal
+ } catch {
+ case ex =>
+ 9
+ }
+
+ def bar1 = try {
+ fatal
+ 7
+ } catch {
+ case ex =>
+ shiftUnit0[Int,Int](9) // regular shift causes no-symbol doesn't have owner
+ }
+
+ def bar2 = try {
+ 7
+ fatal
+ } catch {
+ case ex =>
+ shiftUnit0[Int,Int](9) // regular shift causes no-symbol doesn't have owner
+ }
+
+ def main(args: Array[String]): Unit = {
+ println(reset { foo1 + 3 })
+ println(reset { foo2 + 3 })
+ println(reset { bar1 + 3 })
+ println(reset { bar2 + 3 })
+ }
+
+} \ No newline at end of file
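
The in-test comment "regular shift causes no-symbol doesn't have owner" records why the catch blocks above use shiftUnit0 rather than a plain shift: a shift inside the handler tripped that compiler crash, whereas shiftUnit0 merely lifts a plain value into the CPS-annotated type. A minimal sketch of that equivalence outside a try/catch, under the same API and plugin assumptions as the tests above:

    import scala.util.continuations._

    object ShiftUnitSketch {
      // Lift the value 9 into Int @cps[Int] without touching the continuation.
      def viaShiftUnit: Int @cps[Int] = shiftUnit0[Int, Int](9)

      // The same observable behaviour with an explicit shift: the captured
      // continuation k is applied to 9 immediately.
      def viaShift: Int @cps[Int] = shift { (k: Int => Int) => k(9) }

      def main(args: Array[String]): Unit = {
        println(reset(viaShiftUnit + 3))  // 12
        println(reset(viaShift + 3))      // 12
      }
    }
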
diff --git a/test/continuations/run/while0.check b/test/continuations/run/while0.check
new file mode 100644
index 0000000000..d58c55a31d
--- /dev/null
+++ b/test/continuations/run/while0.check
@@ -0,0 +1 @@
+9000
diff --git a/test/continuations/run/while0.scala b/test/continuations/run/while0.scala
new file mode 100644
index 0000000000..9735f9d2c3
--- /dev/null
+++ b/test/continuations/run/while0.scala
@@ -0,0 +1,22 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def foo(): Int @cps[Unit] = 2
+
+ def test(): Unit @cps[Unit] = {
+ var x = 0
+ while (x < 9000) { // pick number large enough to require tail-call opt
+ x += foo()
+ }
+ println(x)
+ }
+
+ def main(args: Array[String]): Any = {
+ reset(test())
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/run/while1.check b/test/continuations/run/while1.check
new file mode 100644
index 0000000000..3d5f0b9a46
--- /dev/null
+++ b/test/continuations/run/while1.check
@@ -0,0 +1,11 @@
+up
+up
+up
+up
+up
+10
+down
+down
+down
+down
+down
diff --git a/test/continuations/run/while1.scala b/test/continuations/run/while1.scala
new file mode 100644
index 0000000000..fb5dc0079a
--- /dev/null
+++ b/test/continuations/run/while1.scala
@@ -0,0 +1,22 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def foo(): Int @cps[Unit] = shift { k => println("up"); k(2); println("down") }
+
+ def test(): Unit @cps[Unit] = {
+ var x = 0
+ while (x < 9) {
+ x += foo()
+ }
+ println(x)
+ }
+
+ def main(args: Array[String]): Any = {
+ reset(test())
+ }
+
+} \ No newline at end of file
diff --git a/test/continuations/run/while2.check b/test/continuations/run/while2.check
new file mode 100644
index 0000000000..9fe515181b
--- /dev/null
+++ b/test/continuations/run/while2.check
@@ -0,0 +1,19 @@
+up
+up
+up
+up
+up
+up
+up
+up
+up
+9000
+down
+down
+down
+down
+down
+down
+down
+down
+down
diff --git a/test/continuations/run/while2.scala b/test/continuations/run/while2.scala
new file mode 100644
index 0000000000..f36288929e
--- /dev/null
+++ b/test/continuations/run/while2.scala
@@ -0,0 +1,23 @@
+// $Id$
+
+import scala.util.continuations._
+
+
+object Test {
+
+ def foo1(): Int @cps[Unit] = 2
+ def foo2(): Int @cps[Unit] = shift { k => println("up"); k(2); println("down") }
+
+ def test(): Unit @cps[Unit] = {
+ var x = 0
+ while (x < 9000) { // pick number large enough to require tail-call opt
+ x += (if (x % 1000 != 0) foo1() else foo2())
+ }
+ println(x)
+ }
+
+ def main(args: Array[String]): Any = {
+ reset(test())
+ }
+
+} \ No newline at end of file
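
The "pick number large enough to require tail-call opt" comments in while0 and while2 point at what the CPS transform does to a while loop: each iteration ends by invoking the continuation that runs the remaining iterations, so those calls must stay in tail position or 9000 iterations would overflow the stack. A rough hand-written CPS equivalent of while0 in plain Scala (the loop helper below is only an illustration, not part of the plugin) shows the shape:

    object WhileCpsSketch {
      // loop(x)(k): one iteration, then the rest of the loop; k is the code
      // after the whole while-loop (here: printing the final x).
      def loop(x: Int)(k: Int => Unit): Unit =
        if (x < 9000) loop(x + 2)(k)   // must remain a tail call
        else k(x)

      def main(args: Array[String]): Unit =
        loop(0)(x => println(x))       // prints 9000, matching while0.check
    }
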
diff --git a/test/debug/buildmanager/.gitignore b/test/debug/buildmanager/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/buildmanager/.gitignore
diff --git a/test/debug/jvm/.gitignore b/test/debug/jvm/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/jvm/.gitignore
diff --git a/test/debug/neg/.gitignore b/test/debug/neg/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/neg/.gitignore
diff --git a/test/debug/pos/.gitignore b/test/debug/pos/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/pos/.gitignore
diff --git a/test/debug/res/.gitignore b/test/debug/res/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/res/.gitignore
diff --git a/test/debug/run/.gitignore b/test/debug/run/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/run/.gitignore
diff --git a/test/debug/scalacheck/.gitignore b/test/debug/scalacheck/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/scalacheck/.gitignore
diff --git a/test/debug/scalap/.gitignore b/test/debug/scalap/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/scalap/.gitignore
diff --git a/test/debug/shootout/.gitignore b/test/debug/shootout/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/shootout/.gitignore
diff --git a/test/files/script/loadAndExecute/loadAndExecute.check b/test/disabled-windows/script/loadAndExecute.check
index ccd8cd6e37..ccd8cd6e37 100644
--- a/test/files/script/loadAndExecute/loadAndExecute.check
+++ b/test/disabled-windows/script/loadAndExecute.check
diff --git a/test/files/script/loadAndExecute/lAndE1.scala b/test/disabled-windows/script/loadAndExecute/lAndE1.scala
index b20d1a9428..b20d1a9428 100755
--- a/test/files/script/loadAndExecute/lAndE1.scala
+++ b/test/disabled-windows/script/loadAndExecute/lAndE1.scala
diff --git a/test/files/script/loadAndExecute/lAndE2.scala b/test/disabled-windows/script/loadAndExecute/lAndE2.scala
index ea15a04d86..ea15a04d86 100755
--- a/test/files/script/loadAndExecute/lAndE2.scala
+++ b/test/disabled-windows/script/loadAndExecute/lAndE2.scala
diff --git a/test/files/script/loadAndExecute/loadAndExecute.scala b/test/disabled-windows/script/loadAndExecute/loadAndExecute.scala
index 2a9718382b..2a9718382b 100755
--- a/test/files/script/loadAndExecute/loadAndExecute.scala
+++ b/test/disabled-windows/script/loadAndExecute/loadAndExecute.scala
diff --git a/test/files/script/utf8.bat b/test/disabled-windows/script/utf8.bat
index 8bc5c886f7..8bc5c886f7 100755
--- a/test/files/script/utf8.bat
+++ b/test/disabled-windows/script/utf8.bat
diff --git a/test/files/script/utf8.check b/test/disabled-windows/script/utf8.check
index 29dc0518ff..29dc0518ff 100644
--- a/test/files/script/utf8.check
+++ b/test/disabled-windows/script/utf8.check
diff --git a/test/files/script/utf8.scala b/test/disabled-windows/script/utf8.scala
index 5366562cee..f294498cd2 100755
--- a/test/files/script/utf8.scala
+++ b/test/disabled-windows/script/utf8.scala
@@ -1,6 +1,6 @@
#!/bin/sh
-# fact - A simple Scala script that prints out the factorial of
-# the argument specified on the command line.
+#
+# Checks if UTF-8 output makes it through unmangled.
cygwin=false;
case "`uname`" in
@@ -18,8 +18,7 @@ then
SOURCE=`cygpath --$format "$SOURCE"`;
fi
-export LC_CTYPE=en_US.UTF-8
-exec scala -nocompdaemon "$SOURCE" "$@"
+exec scala -Dfile.encoding="UTF-8" -nocompdaemon "$SOURCE" "$@"
!#
/*Comment Комментарий*/
diff --git a/test/disabled/buildmanager/t2651_1/A.scala b/test/disabled/buildmanager/t2651_1/A.scala
new file mode 100644
index 0000000000..d712f6febe
--- /dev/null
+++ b/test/disabled/buildmanager/t2651_1/A.scala
@@ -0,0 +1 @@
+trait A[T]
diff --git a/test/disabled/buildmanager/t2651_1/B.scala b/test/disabled/buildmanager/t2651_1/B.scala
new file mode 100644
index 0000000000..a8aca3d0ed
--- /dev/null
+++ b/test/disabled/buildmanager/t2651_1/B.scala
@@ -0,0 +1,2 @@
+trait B[T] extends A[T]
+
diff --git a/test/disabled/buildmanager/t2651_1/C.scala b/test/disabled/buildmanager/t2651_1/C.scala
new file mode 100644
index 0000000000..690dcf518d
--- /dev/null
+++ b/test/disabled/buildmanager/t2651_1/C.scala
@@ -0,0 +1,3 @@
+object C {
+ new A[Int] {}
+}
diff --git a/test/disabled/buildmanager/t2651_1/D.scala b/test/disabled/buildmanager/t2651_1/D.scala
new file mode 100644
index 0000000000..51273ad986
--- /dev/null
+++ b/test/disabled/buildmanager/t2651_1/D.scala
@@ -0,0 +1,3 @@
+object D {
+ def x[T](a: A[T]) = a
+}
diff --git a/test/disabled/buildmanager/t2651_1/t2651_1.changes/A2.scala b/test/disabled/buildmanager/t2651_1/t2651_1.changes/A2.scala
new file mode 100644
index 0000000000..574b522149
--- /dev/null
+++ b/test/disabled/buildmanager/t2651_1/t2651_1.changes/A2.scala
@@ -0,0 +1,2 @@
+trait A
+
diff --git a/test/disabled/buildmanager/t2651_1/t2651_1.check b/test/disabled/buildmanager/t2651_1/t2651_1.check
new file mode 100644
index 0000000000..8d2cbc8194
--- /dev/null
+++ b/test/disabled/buildmanager/t2651_1/t2651_1.check
@@ -0,0 +1,19 @@
+builder > A.scala B.scala C.scala D.scala
+compiling Set(A.scala, B.scala, C.scala, D.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(trait A -> List(Changed(Class(A))[ tparams: List()]))
+invalidate B.scala because parents have changed [Changed(Class(A))[ tparams: List()]]
+invalidate C.scala because parents have changed [Changed(Class(A))[ tparams: List()]]
+invalidate D.scala because it references changed class [Changed(Class(A))[ tparams: List()]]
+compiling Set(B.scala, C.scala, D.scala)
+B.scala:1: error: A does not take type parameters
+trait B[T] extends A[T]
+ ^
+C.scala:2: error: A does not take type parameters
+ new A[Int] {}
+ ^
+D.scala:2: error: A does not take type parameters
+ def x[T](a: A[T]) = a
+ ^
diff --git a/test/disabled/buildmanager/t2651_1/t2651_1.test b/test/disabled/buildmanager/t2651_1/t2651_1.test
new file mode 100644
index 0000000000..4f67d5e233
--- /dev/null
+++ b/test/disabled/buildmanager/t2651_1/t2651_1.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala C.scala D.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/jvm/JavaInteraction.check b/test/disabled/jvm/JavaInteraction.check
index fb9d3cdd8c..fb9d3cdd8c 100644
--- a/test/files/jvm/JavaInteraction.check
+++ b/test/disabled/jvm/JavaInteraction.check
diff --git a/test/files/jvm/JavaInteraction.scala b/test/disabled/jvm/JavaInteraction.scala
index 1316fad5d4..1316fad5d4 100644
--- a/test/files/jvm/JavaInteraction.scala
+++ b/test/disabled/jvm/JavaInteraction.scala
diff --git a/test/pending/pos/t1053.scala b/test/disabled/pos/t1053.scala
index 1d4dfb637e..1d4dfb637e 100644
--- a/test/pending/pos/t1053.scala
+++ b/test/disabled/pos/t1053.scala
diff --git a/test/pending/pos/t2619.scala b/test/disabled/pos/t2619.scala
index 565bc9572b..565bc9572b 100644
--- a/test/pending/pos/t2619.scala
+++ b/test/disabled/pos/t2619.scala
diff --git a/test/pending/pos/ticket2251.scala b/test/disabled/pos/ticket2251.scala
index 7b6efb0ea0..7b6efb0ea0 100644
--- a/test/pending/pos/ticket2251.scala
+++ b/test/disabled/pos/ticket2251.scala
diff --git a/test/disabled/run/docgenerator.scala b/test/disabled/run/docgenerator.scala
index 59f90ba415..8e0c7589ad 100644
--- a/test/disabled/run/docgenerator.scala
+++ b/test/disabled/run/docgenerator.scala
@@ -10,7 +10,7 @@ object Test {
def main(args: Array[String]) {
// overwrites value of UrlContext.generator in file DocUtil.scala
System.setProperty("doc.generator", "scaladoc")
- var dirname = System.getProperty("scalatest.output")
+ var dirname = System.getProperty("partest.output")
if (dirname eq null) dirname = System.getProperty("java.io.tmpdir")
val tmpDir = new File(dirname)
tmpDir.mkdirs()
@@ -116,7 +116,7 @@ object Foo2 {
// when running that compiler, give it a scala-library to the classpath
docSettings.classpath.value = System.getProperty("java.class.path")
reporter = new ConsoleReporter(docSettings)
- val command = new CompilerCommand(args.toList, docSettings, error, false)
+ val command = new CompilerCommand(args.toList, docSettings)
try {
object compiler extends Global(command.settings, reporter) {
override protected def computeInternalPhases() : Unit = {
diff --git a/test/files/bench/equality/eqeq.log b/test/files/bench/equality/eqeq.eqlog
index d1e27aceed..d1e27aceed 100755..100644
--- a/test/files/bench/equality/eqeq.log
+++ b/test/files/bench/equality/eqeq.eqlog
diff --git a/test/files/buildmanager/annotated/A.scala b/test/files/buildmanager/annotated/A.scala
new file mode 100644
index 0000000000..4130cf21ec
--- /dev/null
+++ b/test/files/buildmanager/annotated/A.scala
@@ -0,0 +1 @@
+case class A[T](x: String, y: T)
diff --git a/test/files/buildmanager/annotated/annotated.check b/test/files/buildmanager/annotated/annotated.check
new file mode 100644
index 0000000000..ce92c9a294
--- /dev/null
+++ b/test/files/buildmanager/annotated/annotated.check
@@ -0,0 +1,6 @@
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(), object A -> List())
diff --git a/test/files/buildmanager/annotated/annotated.test b/test/files/buildmanager/annotated/annotated.test
new file mode 100644
index 0000000000..392e0d365f
--- /dev/null
+++ b/test/files/buildmanager/annotated/annotated.test
@@ -0,0 +1,2 @@
+>>compile A.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/freshnames/A.scala b/test/files/buildmanager/freshnames/A.scala
new file mode 100644
index 0000000000..88ea44b3c7
--- /dev/null
+++ b/test/files/buildmanager/freshnames/A.scala
@@ -0,0 +1,16 @@
+abstract class A {
+
+ var t: List[B]
+
+ def foo(n: String): Option[B] = {
+ t.reverse find (_.names contains n)
+ }
+
+ def bar(n: Int): Option[B] = {
+ t.reverse find (_.names contains n)
+ }
+}
+
+//class A
+case class B(names: List[String])
+
diff --git a/test/files/buildmanager/freshnames/B.scala b/test/files/buildmanager/freshnames/B.scala
new file mode 100644
index 0000000000..d700225c08
--- /dev/null
+++ b/test/files/buildmanager/freshnames/B.scala
@@ -0,0 +1,4 @@
+abstract class C extends A {
+ def test(n: Int) = bar(n)
+}
+
diff --git a/test/files/buildmanager/freshnames/freshnames.check b/test/files/buildmanager/freshnames/freshnames.check
new file mode 100644
index 0000000000..9f05fb8a36
--- /dev/null
+++ b/test/files/buildmanager/freshnames/freshnames.check
@@ -0,0 +1,6 @@
+builder > B.scala A.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(), class B -> List(), object B -> List())
diff --git a/test/files/buildmanager/freshnames/freshnames.test b/test/files/buildmanager/freshnames/freshnames.test
new file mode 100644
index 0000000000..20b20298f9
--- /dev/null
+++ b/test/files/buildmanager/freshnames/freshnames.test
@@ -0,0 +1,2 @@
+>>compile B.scala A.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/infer/A.scala b/test/files/buildmanager/infer/A.scala
new file mode 100644
index 0000000000..5e0e268122
--- /dev/null
+++ b/test/files/buildmanager/infer/A.scala
@@ -0,0 +1,16 @@
+class Foo(flag: Boolean) {
+ val classpath =
+ if (flag)
+ new AClasspath
+ else
+ new BClasspath
+}
+
+class AClasspath extends MergedClasspath[A]
+
+class BClasspath extends MergedClasspath[B]
+
+abstract class MergedClasspath[T]
+
+class A
+class B
diff --git a/test/files/buildmanager/infer/infer.check b/test/files/buildmanager/infer/infer.check
new file mode 100644
index 0000000000..1f736977ff
--- /dev/null
+++ b/test/files/buildmanager/infer/infer.check
@@ -0,0 +1,6 @@
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(), class AClasspath -> List(), class B -> List(), class BClasspath -> List(), class Foo -> List(), class MergedClasspath -> List())
diff --git a/test/files/buildmanager/infer/infer.test b/test/files/buildmanager/infer/infer.test
new file mode 100644
index 0000000000..392e0d365f
--- /dev/null
+++ b/test/files/buildmanager/infer/infer.test
@@ -0,0 +1,2 @@
+>>compile A.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/overloaded_1/A.scala b/test/files/buildmanager/overloaded_1/A.scala
new file mode 100644
index 0000000000..c070faf978
--- /dev/null
+++ b/test/files/buildmanager/overloaded_1/A.scala
@@ -0,0 +1,11 @@
+trait As {
+ trait C extends D {
+ override def foo = this /// Shouldn't cause the change
+ override def foo(act: List[D]) = this
+ }
+
+ abstract class D{
+ def foo: D = this
+ def foo(act: List[D]) = this
+ }
+}
diff --git a/test/files/buildmanager/overloaded_1/overloaded_1.check b/test/files/buildmanager/overloaded_1/overloaded_1.check
new file mode 100644
index 0000000000..4d643ce6b4
--- /dev/null
+++ b/test/files/buildmanager/overloaded_1/overloaded_1.check
@@ -0,0 +1,6 @@
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class As$D -> List(), object As$C$class -> List(), object As$class -> List(), trait As -> List(), trait As$C -> List())
diff --git a/test/files/buildmanager/overloaded_1/overloaded_1.test b/test/files/buildmanager/overloaded_1/overloaded_1.test
new file mode 100644
index 0000000000..392e0d365f
--- /dev/null
+++ b/test/files/buildmanager/overloaded_1/overloaded_1.test
@@ -0,0 +1,2 @@
+>>compile A.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/simpletest/A.scala b/test/files/buildmanager/simpletest/A.scala
new file mode 100644
index 0000000000..ef704706bb
--- /dev/null
+++ b/test/files/buildmanager/simpletest/A.scala
@@ -0,0 +1,3 @@
+class A {
+ def foo = 2
+}
diff --git a/test/files/buildmanager/simpletest/B.scala b/test/files/buildmanager/simpletest/B.scala
new file mode 100644
index 0000000000..364dc6e4cb
--- /dev/null
+++ b/test/files/buildmanager/simpletest/B.scala
@@ -0,0 +1,3 @@
+class B extends A {
+ override def foo = 2
+}
diff --git a/test/files/buildmanager/simpletest/simpletest.changes/A1.scala b/test/files/buildmanager/simpletest/simpletest.changes/A1.scala
new file mode 100644
index 0000000000..83d15dc739
--- /dev/null
+++ b/test/files/buildmanager/simpletest/simpletest.changes/A1.scala
@@ -0,0 +1 @@
+class A
diff --git a/test/files/buildmanager/simpletest/simpletest.check b/test/files/buildmanager/simpletest/simpletest.check
new file mode 100644
index 0000000000..95ea2c4c0d
--- /dev/null
+++ b/test/files/buildmanager/simpletest/simpletest.check
@@ -0,0 +1,11 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(Removed(Definition(A.foo))))
+invalidate B.scala because inherited method removed [Removed(Definition(A.foo))]
+compiling Set(B.scala)
+B.scala:2: error: method foo overrides nothing
+ override def foo = 2
+ ^
diff --git a/test/files/buildmanager/simpletest/simpletest.test b/test/files/buildmanager/simpletest/simpletest.test
new file mode 100644
index 0000000000..2c0be1502f
--- /dev/null
+++ b/test/files/buildmanager/simpletest/simpletest.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A1.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2280/A.scala b/test/files/buildmanager/t2280/A.scala
new file mode 100644
index 0000000000..5febadeb06
--- /dev/null
+++ b/test/files/buildmanager/t2280/A.scala
@@ -0,0 +1 @@
+class A extends B
diff --git a/test/files/buildmanager/t2280/B.java b/test/files/buildmanager/t2280/B.java
new file mode 100644
index 0000000000..aef8e106e9
--- /dev/null
+++ b/test/files/buildmanager/t2280/B.java
@@ -0,0 +1,2 @@
+public class B {}
+
diff --git a/test/files/buildmanager/t2280/t2280.check b/test/files/buildmanager/t2280/t2280.check
new file mode 100644
index 0000000000..7ea7511c63
--- /dev/null
+++ b/test/files/buildmanager/t2280/t2280.check
@@ -0,0 +1,6 @@
+builder > A.scala B.java
+compiling Set(A.scala, B.java)
+Changes: Map()
+builder > B.java
+compiling Set(B.java)
+Changes: Map(class B -> List())
diff --git a/test/files/buildmanager/t2280/t2280.test b/test/files/buildmanager/t2280/t2280.test
new file mode 100644
index 0000000000..2eda777853
--- /dev/null
+++ b/test/files/buildmanager/t2280/t2280.test
@@ -0,0 +1,2 @@
+>>compile A.scala B.java
+>>compile B.java
diff --git a/test/files/buildmanager/t2556_1/A.scala b/test/files/buildmanager/t2556_1/A.scala
new file mode 100644
index 0000000000..c6e200b217
--- /dev/null
+++ b/test/files/buildmanager/t2556_1/A.scala
@@ -0,0 +1,3 @@
+class A {
+ def x(i: Int) = i+"3"
+}
diff --git a/test/files/buildmanager/t2556_1/B.scala b/test/files/buildmanager/t2556_1/B.scala
new file mode 100644
index 0000000000..8529587b56
--- /dev/null
+++ b/test/files/buildmanager/t2556_1/B.scala
@@ -0,0 +1,3 @@
+class B extends A {
+ def x(s: String) = s+"5"
+}
diff --git a/test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala b/test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala
new file mode 100644
index 0000000000..4ac1045e13
--- /dev/null
+++ b/test/files/buildmanager/t2556_1/t2556_1.changes/A2.scala
@@ -0,0 +1,4 @@
+class A {
+ def x(i: String) = i+"3"
+}
+
diff --git a/test/files/buildmanager/t2556_1/t2556_1.check b/test/files/buildmanager/t2556_1/t2556_1.check
new file mode 100644
index 0000000000..dc9437fa7e
--- /dev/null
+++ b/test/files/buildmanager/t2556_1/t2556_1.check
@@ -0,0 +1,12 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(Changed(Definition(A.x))[method x changed from (i: Int)java.lang.String to (i: java.lang.String)java.lang.String flags: <method>]))
+invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)java.lang.String to (i: java.lang.String)java.lang.String flags: <method>]]
+compiling Set(B.scala)
+B.scala:2: error: overriding method x in class A of type (i: String)java.lang.String;
+ method x needs `override' modifier
+ def x(s: String) = s+"5"
+ ^
diff --git a/test/files/buildmanager/t2556_1/t2556_1.test b/test/files/buildmanager/t2556_1/t2556_1.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2556_1/t2556_1.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2556_2/A.scala b/test/files/buildmanager/t2556_2/A.scala
new file mode 100644
index 0000000000..b8da5c8fb1
--- /dev/null
+++ b/test/files/buildmanager/t2556_2/A.scala
@@ -0,0 +1,4 @@
+class A {
+ def x(i: Int) = i+"3"
+}
+
diff --git a/test/files/buildmanager/t2556_2/B.scala b/test/files/buildmanager/t2556_2/B.scala
new file mode 100644
index 0000000000..80ff25d0ca
--- /dev/null
+++ b/test/files/buildmanager/t2556_2/B.scala
@@ -0,0 +1,2 @@
+class B extends A
+
diff --git a/test/files/buildmanager/t2556_2/C.scala b/test/files/buildmanager/t2556_2/C.scala
new file mode 100644
index 0000000000..0ab13e3757
--- /dev/null
+++ b/test/files/buildmanager/t2556_2/C.scala
@@ -0,0 +1,4 @@
+class C extends B {
+ def x(s: String) = s+"5"
+}
+
diff --git a/test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala b/test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala
new file mode 100644
index 0000000000..4ac1045e13
--- /dev/null
+++ b/test/files/buildmanager/t2556_2/t2556_2.changes/A2.scala
@@ -0,0 +1,4 @@
+class A {
+ def x(i: String) = i+"3"
+}
+
diff --git a/test/files/buildmanager/t2556_2/t2556_2.check b/test/files/buildmanager/t2556_2/t2556_2.check
new file mode 100644
index 0000000000..a4d6724b11
--- /dev/null
+++ b/test/files/buildmanager/t2556_2/t2556_2.check
@@ -0,0 +1,13 @@
+builder > A.scala B.scala C.scala
+compiling Set(A.scala, B.scala, C.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(Changed(Definition(A.x))[method x changed from (i: Int)java.lang.String to (i: java.lang.String)java.lang.String flags: <method>]))
+invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)java.lang.String to (i: java.lang.String)java.lang.String flags: <method>]]
+invalidate C.scala because inherited method changed [Changed(Definition(A.x))[method x changed from (i: Int)java.lang.String to (i: java.lang.String)java.lang.String flags: <method>]]
+compiling Set(B.scala, C.scala)
+C.scala:2: error: overriding method x in class A of type (i: String)java.lang.String;
+ method x needs `override' modifier
+ def x(s: String) = s+"5"
+ ^
diff --git a/test/files/buildmanager/t2556_2/t2556_2.test b/test/files/buildmanager/t2556_2/t2556_2.test
new file mode 100644
index 0000000000..9f31bb6409
--- /dev/null
+++ b/test/files/buildmanager/t2556_2/t2556_2.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala C.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2556_3/A.scala b/test/files/buildmanager/t2556_3/A.scala
new file mode 100644
index 0000000000..089a05f493
--- /dev/null
+++ b/test/files/buildmanager/t2556_3/A.scala
@@ -0,0 +1,5 @@
+class A {
+ def x = 3
+}
+class B extends A
+
diff --git a/test/files/buildmanager/t2556_3/B.scala b/test/files/buildmanager/t2556_3/B.scala
new file mode 100644
index 0000000000..0ec5ae4b55
--- /dev/null
+++ b/test/files/buildmanager/t2556_3/B.scala
@@ -0,0 +1,5 @@
+object E {
+ def main(args: Array[String]) =
+ println( (new C).x )
+}
+
diff --git a/test/files/buildmanager/t2556_3/C.scala b/test/files/buildmanager/t2556_3/C.scala
new file mode 100644
index 0000000000..403df8455e
--- /dev/null
+++ b/test/files/buildmanager/t2556_3/C.scala
@@ -0,0 +1,2 @@
+class C extends B
+
diff --git a/test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala b/test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala
new file mode 100644
index 0000000000..21cb2779f9
--- /dev/null
+++ b/test/files/buildmanager/t2556_3/t2556_3.changes/A2.scala
@@ -0,0 +1,5 @@
+class A {
+ def x = 3
+}
+class B
+
diff --git a/test/files/buildmanager/t2556_3/t2556_3.check b/test/files/buildmanager/t2556_3/t2556_3.check
new file mode 100644
index 0000000000..af0c63eebc
--- /dev/null
+++ b/test/files/buildmanager/t2556_3/t2556_3.check
@@ -0,0 +1,18 @@
+builder > A.scala B.scala C.scala
+compiling Set(A.scala, B.scala, C.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(), class B -> List(Changed(Class(B))[List((A,java.lang.Object), (ScalaObject,ScalaObject))]))
+invalidate C.scala because parents have changed [Changed(Class(B))[List((A,java.lang.Object), (ScalaObject,ScalaObject))]]
+invalidate B.scala because it references invalid (no longer inherited) definition [ParentChanged(Class(C))]
+compiling Set(B.scala, C.scala)
+B.scala:3: error: type mismatch;
+ found : C
+ required: ?{val x: ?}
+Note that implicit conversions are not applicable because they are ambiguous:
+ both method any2ArrowAssoc in object Predef of type [A](x: A)ArrowAssoc[A]
+ and method any2Ensuring in object Predef of type [A](x: A)Ensuring[A]
+ are possible conversion functions from C to ?{val x: ?}
+ println( (new C).x )
+ ^
diff --git a/test/files/buildmanager/t2556_3/t2556_3.test b/test/files/buildmanager/t2556_3/t2556_3.test
new file mode 100644
index 0000000000..9f31bb6409
--- /dev/null
+++ b/test/files/buildmanager/t2556_3/t2556_3.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala C.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2557/A.scala b/test/files/buildmanager/t2557/A.scala
new file mode 100644
index 0000000000..3be55f19a6
--- /dev/null
+++ b/test/files/buildmanager/t2557/A.scala
@@ -0,0 +1,4 @@
+trait A {
+ def x = 3
+}
+
diff --git a/test/files/buildmanager/t2557/B.scala b/test/files/buildmanager/t2557/B.scala
new file mode 100644
index 0000000000..ea86a90079
--- /dev/null
+++ b/test/files/buildmanager/t2557/B.scala
@@ -0,0 +1,4 @@
+trait B extends A {
+ override def x = super.x * 2
+}
+
diff --git a/test/files/buildmanager/t2557/C.scala b/test/files/buildmanager/t2557/C.scala
new file mode 100644
index 0000000000..dd575ac38d
--- /dev/null
+++ b/test/files/buildmanager/t2557/C.scala
@@ -0,0 +1,3 @@
+trait C extends A {
+ override def x = super.x + 5
+}
diff --git a/test/files/buildmanager/t2557/D.scala b/test/files/buildmanager/t2557/D.scala
new file mode 100644
index 0000000000..4e662a80ce
--- /dev/null
+++ b/test/files/buildmanager/t2557/D.scala
@@ -0,0 +1 @@
+trait D extends C with B
diff --git a/test/files/buildmanager/t2557/E.scala b/test/files/buildmanager/t2557/E.scala
new file mode 100644
index 0000000000..2aee552675
--- /dev/null
+++ b/test/files/buildmanager/t2557/E.scala
@@ -0,0 +1 @@
+trait E extends D
diff --git a/test/files/buildmanager/t2557/F.scala b/test/files/buildmanager/t2557/F.scala
new file mode 100644
index 0000000000..e1996704e7
--- /dev/null
+++ b/test/files/buildmanager/t2557/F.scala
@@ -0,0 +1,4 @@
+object F extends E {
+ def main(args: Array[String]) =
+ println(x)
+}
diff --git a/test/files/buildmanager/t2557/t2557.changes/D2.scala b/test/files/buildmanager/t2557/t2557.changes/D2.scala
new file mode 100644
index 0000000000..67295f8e6d
--- /dev/null
+++ b/test/files/buildmanager/t2557/t2557.changes/D2.scala
@@ -0,0 +1,2 @@
+trait D extends B with C
+
diff --git a/test/files/buildmanager/t2557/t2557.check b/test/files/buildmanager/t2557/t2557.check
new file mode 100644
index 0000000000..f51e801017
--- /dev/null
+++ b/test/files/buildmanager/t2557/t2557.check
@@ -0,0 +1,10 @@
+builder > A.scala B.scala C.scala D.scala E.scala F.scala
+compiling Set(A.scala, B.scala, C.scala, D.scala, E.scala, F.scala)
+Changes: Map()
+builder > D.scala
+compiling Set(D.scala)
+Changes: Map(trait D -> List(Changed(Class(D))[List((java.lang.Object,java.lang.Object), (C,B), (B,C))]))
+invalidate E.scala because parents have changed [Changed(Class(D))[List((java.lang.Object,java.lang.Object), (C,B), (B,C))]]
+invalidate F.scala because parents have changed [Changed(Class(D))[List((java.lang.Object,java.lang.Object), (C,B), (B,C))]]
+compiling Set(E.scala, F.scala)
+Changes: Map(object F -> List(), trait E -> List())
diff --git a/test/files/buildmanager/t2557/t2557.test b/test/files/buildmanager/t2557/t2557.test
new file mode 100644
index 0000000000..6b0103092f
--- /dev/null
+++ b/test/files/buildmanager/t2557/t2557.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala C.scala D.scala E.scala F.scala
+>>update D.scala=>D2.scala
+>>compile D.scala
diff --git a/test/files/buildmanager/t2559/A.scala b/test/files/buildmanager/t2559/A.scala
new file mode 100644
index 0000000000..fb4f6e3545
--- /dev/null
+++ b/test/files/buildmanager/t2559/A.scala
@@ -0,0 +1,5 @@
+sealed trait A
+class B extends A
+class C extends A
+//class E extends A
+
diff --git a/test/files/buildmanager/t2559/D.scala b/test/files/buildmanager/t2559/D.scala
new file mode 100644
index 0000000000..4b8422db04
--- /dev/null
+++ b/test/files/buildmanager/t2559/D.scala
@@ -0,0 +1,8 @@
+object D {
+ def x(a: A) =
+ a match {
+ case _: B => ()
+ case _: C => ()
+ }
+}
+
diff --git a/test/files/buildmanager/t2559/t2559.changes/A2.scala b/test/files/buildmanager/t2559/t2559.changes/A2.scala
new file mode 100644
index 0000000000..8e90594e2c
--- /dev/null
+++ b/test/files/buildmanager/t2559/t2559.changes/A2.scala
@@ -0,0 +1,5 @@
+sealed trait A
+class B extends A
+class C extends A
+class E extends A
+
diff --git a/test/files/buildmanager/t2559/t2559.check b/test/files/buildmanager/t2559/t2559.check
new file mode 100644
index 0000000000..752278fbe8
--- /dev/null
+++ b/test/files/buildmanager/t2559/t2559.check
@@ -0,0 +1,14 @@
+builder > A.scala D.scala
+compiling Set(A.scala, D.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class B -> List(), class C -> List(), class E -> List(Changed(Class(A))[class E extends a sealed trait A]), trait A -> List())
+invalidate D.scala because it references changed class [Changed(Class(A))[class E extends a sealed trait A]]
+compiling Set(D.scala)
+D.scala:3: warning: match is not exhaustive!
+missing combination E
+
+ a match {
+ ^
+Changes: Map(object D -> List())
diff --git a/test/files/buildmanager/t2559/t2559.test b/test/files/buildmanager/t2559/t2559.test
new file mode 100644
index 0000000000..b787c5b39f
--- /dev/null
+++ b/test/files/buildmanager/t2559/t2559.test
@@ -0,0 +1,3 @@
+>>compile A.scala D.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2562/A.scala b/test/files/buildmanager/t2562/A.scala
new file mode 100644
index 0000000000..2208585eed
--- /dev/null
+++ b/test/files/buildmanager/t2562/A.scala
@@ -0,0 +1,7 @@
+object A
+{
+ def x0 = B.x0
+ def x1 = B.x1
+ def x2 = B.x2
+ def x3 = 3
+}
diff --git a/test/files/buildmanager/t2562/B.scala b/test/files/buildmanager/t2562/B.scala
new file mode 100644
index 0000000000..6f658954e1
--- /dev/null
+++ b/test/files/buildmanager/t2562/B.scala
@@ -0,0 +1,8 @@
+object B
+{
+ def x0 = A.x1
+ def x1 = A.x2
+ def x2 = A.x3
+}
+
+
diff --git a/test/files/buildmanager/t2562/t2562.changes/A2.scala b/test/files/buildmanager/t2562/t2562.changes/A2.scala
new file mode 100644
index 0000000000..743609eb18
--- /dev/null
+++ b/test/files/buildmanager/t2562/t2562.changes/A2.scala
@@ -0,0 +1,8 @@
+object A
+{
+ def x0 = B.x0
+ def x1 = B.x1
+ def x2 = B.x2
+ def x3 = "3"
+}
+
diff --git a/test/files/buildmanager/t2562/t2562.check b/test/files/buildmanager/t2562/t2562.check
new file mode 100644
index 0000000000..813d2735e1
--- /dev/null
+++ b/test/files/buildmanager/t2562/t2562.check
@@ -0,0 +1,12 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(object A -> List(Changed(Definition(A.x3))[method x3 changed from ()Int to ()java.lang.String flags: <method>]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.x3))[method x3 changed from ()Int to ()java.lang.String flags: <method>]]
+compiling Set(B.scala)
+Changes: Map(object B -> List(Changed(Definition(B.x2))[method x2 changed from ()Int to ()java.lang.String flags: <method>]))
+invalidate A.scala because it references changed definition [Changed(Definition(B.x2))[method x2 changed from ()Int to ()java.lang.String flags: <method>]]
+compiling Set(A.scala, B.scala)
+Changes: Map(object A -> List(Changed(Definition(A.x0))[method x0 changed from ()Int to ()java.lang.String flags: <method>], Changed(Definition(A.x1))[method x1 changed from ()Int to ()java.lang.String flags: <method>], Changed(Definition(A.x2))[method x2 changed from ()Int to ()java.lang.String flags: <method>]), object B -> List(Changed(Definition(B.x0))[method x0 changed from ()Int to ()java.lang.String flags: <method>], Changed(Definition(B.x1))[method x1 changed from ()Int to ()java.lang.String flags: <method>]))
diff --git a/test/files/buildmanager/t2562/t2562.test b/test/files/buildmanager/t2562/t2562.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2562/t2562.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2649/A.scala b/test/files/buildmanager/t2649/A.scala
new file mode 100644
index 0000000000..86cc3f2c15
--- /dev/null
+++ b/test/files/buildmanager/t2649/A.scala
@@ -0,0 +1,3 @@
+object A {
+ def x(zz: Int, yy: Int) = yy - zz
+}
diff --git a/test/files/buildmanager/t2649/B.scala b/test/files/buildmanager/t2649/B.scala
new file mode 100644
index 0000000000..26c89518cb
--- /dev/null
+++ b/test/files/buildmanager/t2649/B.scala
@@ -0,0 +1,4 @@
+object B {
+ def main(args: Array[String]): Unit =
+ println( A.x(zz = 3, yy = 4) )
+}
diff --git a/test/files/buildmanager/t2649/t2649.changes/A2.scala b/test/files/buildmanager/t2649/t2649.changes/A2.scala
new file mode 100644
index 0000000000..9a6309fca3
--- /dev/null
+++ b/test/files/buildmanager/t2649/t2649.changes/A2.scala
@@ -0,0 +1,4 @@
+object A {
+ def x(yy: Int, zz: Int) = yy - zz
+}
+
diff --git a/test/files/buildmanager/t2649/t2649.check b/test/files/buildmanager/t2649/t2649.check
new file mode 100644
index 0000000000..5b698ec03f
--- /dev/null
+++ b/test/files/buildmanager/t2649/t2649.check
@@ -0,0 +1,9 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(object A -> List(Changed(Definition(A.x))[method x changed from (zz: Int,yy: Int)Int to (yy: Int,zz: Int)Int flags: <method>]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from (zz: Int,yy: Int)Int to (yy: Int,zz: Int)Int flags: <method>]]
+compiling Set(B.scala)
+Changes: Map(object B -> List())
diff --git a/test/files/buildmanager/t2649/t2649.test b/test/files/buildmanager/t2649/t2649.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2649/t2649.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2650_1/A.scala b/test/files/buildmanager/t2650_1/A.scala
new file mode 100644
index 0000000000..74714a3c47
--- /dev/null
+++ b/test/files/buildmanager/t2650_1/A.scala
@@ -0,0 +1,4 @@
+trait A {
+ type S[_]
+}
+
diff --git a/test/files/buildmanager/t2650_1/B.scala b/test/files/buildmanager/t2650_1/B.scala
new file mode 100644
index 0000000000..80f0e30259
--- /dev/null
+++ b/test/files/buildmanager/t2650_1/B.scala
@@ -0,0 +1,3 @@
+trait B extends A {
+ type F = S[Int]
+}
diff --git a/test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala b/test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala
new file mode 100644
index 0000000000..2b8ead4ff1
--- /dev/null
+++ b/test/files/buildmanager/t2650_1/t2650_1.changes/A2.scala
@@ -0,0 +1,3 @@
+trait A {
+ type S
+}
diff --git a/test/files/buildmanager/t2650_1/t2650_1.check b/test/files/buildmanager/t2650_1/t2650_1.check
new file mode 100644
index 0000000000..ecddb33620
--- /dev/null
+++ b/test/files/buildmanager/t2650_1/t2650_1.check
@@ -0,0 +1,11 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(trait A -> List(Changed(Definition(A.S))[type S changed from A.this.S[_] to A.this.S flags: <deferred>]))
+invalidate B.scala because inherited method changed [Changed(Definition(A.S))[type S changed from A.this.S[_] to A.this.S flags: <deferred>]]
+compiling Set(B.scala)
+B.scala:2: error: B.this.S does not take type parameters
+ type F = S[Int]
+ ^
diff --git a/test/files/buildmanager/t2650_1/t2650_1.test b/test/files/buildmanager/t2650_1/t2650_1.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2650_1/t2650_1.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2650_2/A.scala b/test/files/buildmanager/t2650_2/A.scala
new file mode 100644
index 0000000000..bcea634485
--- /dev/null
+++ b/test/files/buildmanager/t2650_2/A.scala
@@ -0,0 +1,3 @@
+trait A {
+ type S = Int
+}
diff --git a/test/files/buildmanager/t2650_2/B.scala b/test/files/buildmanager/t2650_2/B.scala
new file mode 100644
index 0000000000..22a3a9a48e
--- /dev/null
+++ b/test/files/buildmanager/t2650_2/B.scala
@@ -0,0 +1,4 @@
+trait B extends A {
+ def x: S
+ def y: Int = x
+}
diff --git a/test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala b/test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala
new file mode 100644
index 0000000000..8274c1b62d
--- /dev/null
+++ b/test/files/buildmanager/t2650_2/t2650_2.changes/A2.scala
@@ -0,0 +1,4 @@
+trait A {
+ type S = Long
+}
+
diff --git a/test/files/buildmanager/t2650_2/t2650_2.check b/test/files/buildmanager/t2650_2/t2650_2.check
new file mode 100644
index 0000000000..7ab72fb619
--- /dev/null
+++ b/test/files/buildmanager/t2650_2/t2650_2.check
@@ -0,0 +1,13 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(trait A -> List(Changed(Definition(A.S))[type S changed from A.this.S to A.this.S flags: ]))
+invalidate B.scala because inherited method changed [Changed(Definition(A.S))[type S changed from A.this.S to A.this.S flags: ]]
+compiling Set(B.scala)
+B.scala:3: error: type mismatch;
+ found : B.this.S
+ required: Int
+ def y: Int = x
+ ^
diff --git a/test/files/buildmanager/t2650_2/t2650_2.test b/test/files/buildmanager/t2650_2/t2650_2.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2650_2/t2650_2.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2650_3/A.scala b/test/files/buildmanager/t2650_3/A.scala
new file mode 100644
index 0000000000..cd13843eb9
--- /dev/null
+++ b/test/files/buildmanager/t2650_3/A.scala
@@ -0,0 +1,4 @@
+trait A {
+ type T = Int
+ def x: T
+}
diff --git a/test/files/buildmanager/t2650_3/B.scala b/test/files/buildmanager/t2650_3/B.scala
new file mode 100644
index 0000000000..46a8cf270a
--- /dev/null
+++ b/test/files/buildmanager/t2650_3/B.scala
@@ -0,0 +1,3 @@
+object B {
+ def x(a: A): Int = a.x
+}
diff --git a/test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala b/test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala
new file mode 100644
index 0000000000..e5667b2539
--- /dev/null
+++ b/test/files/buildmanager/t2650_3/t2650_3.changes/A2.scala
@@ -0,0 +1,4 @@
+trait A {
+ type T = Long
+ def x: T
+}
diff --git a/test/files/buildmanager/t2650_3/t2650_3.check b/test/files/buildmanager/t2650_3/t2650_3.check
new file mode 100644
index 0000000000..27be2f5ae8
--- /dev/null
+++ b/test/files/buildmanager/t2650_3/t2650_3.check
@@ -0,0 +1,13 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(trait A -> List(Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ]]
+compiling Set(B.scala)
+B.scala:2: error: type mismatch;
+ found : a.T
+ required: Int
+ def x(a: A): Int = a.x
+ ^
diff --git a/test/files/buildmanager/t2650_3/t2650_3.test b/test/files/buildmanager/t2650_3/t2650_3.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2650_3/t2650_3.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2650_4/A.scala b/test/files/buildmanager/t2650_4/A.scala
new file mode 100644
index 0000000000..b9a519eb48
--- /dev/null
+++ b/test/files/buildmanager/t2650_4/A.scala
@@ -0,0 +1,5 @@
+trait A {
+ type T = Int
+ type T2 = T
+ def x: T2
+}
diff --git a/test/files/buildmanager/t2650_4/B.scala b/test/files/buildmanager/t2650_4/B.scala
new file mode 100644
index 0000000000..46a8cf270a
--- /dev/null
+++ b/test/files/buildmanager/t2650_4/B.scala
@@ -0,0 +1,3 @@
+object B {
+ def x(a: A): Int = a.x
+}
diff --git a/test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala b/test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala
new file mode 100644
index 0000000000..0220e7b7bc
--- /dev/null
+++ b/test/files/buildmanager/t2650_4/t2650_4.changes/A2.scala
@@ -0,0 +1,5 @@
+trait A {
+ type T = Long
+ type T2 = T
+ def x: T2
+}
diff --git a/test/files/buildmanager/t2650_4/t2650_4.check b/test/files/buildmanager/t2650_4/t2650_4.check
new file mode 100644
index 0000000000..ba092d013f
--- /dev/null
+++ b/test/files/buildmanager/t2650_4/t2650_4.check
@@ -0,0 +1,13 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(trait A -> List(Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.T))[type T changed from A.this.T to A.this.T flags: ]]
+compiling Set(B.scala)
+B.scala:2: error: type mismatch;
+ found : a.T2
+ required: Int
+ def x(a: A): Int = a.x
+ ^
diff --git a/test/files/buildmanager/t2650_4/t2650_4.test b/test/files/buildmanager/t2650_4/t2650_4.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2650_4/t2650_4.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2651_2/A.scala b/test/files/buildmanager/t2651_2/A.scala
new file mode 100644
index 0000000000..d712f6febe
--- /dev/null
+++ b/test/files/buildmanager/t2651_2/A.scala
@@ -0,0 +1 @@
+trait A[T]
diff --git a/test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala b/test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala
new file mode 100644
index 0000000000..7fb573e077
--- /dev/null
+++ b/test/files/buildmanager/t2651_2/t2651_2.changes/A2.scala
@@ -0,0 +1 @@
+trait A[S]
diff --git a/test/files/buildmanager/t2651_2/t2651_2.check b/test/files/buildmanager/t2651_2/t2651_2.check
new file mode 100644
index 0000000000..dd789b7565
--- /dev/null
+++ b/test/files/buildmanager/t2651_2/t2651_2.check
@@ -0,0 +1,6 @@
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(trait A -> List())
diff --git a/test/files/buildmanager/t2651_2/t2651_2.test b/test/files/buildmanager/t2651_2/t2651_2.test
new file mode 100644
index 0000000000..d0614473ce
--- /dev/null
+++ b/test/files/buildmanager/t2651_2/t2651_2.test
@@ -0,0 +1,3 @@
+>>compile A.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2651_3/A.scala b/test/files/buildmanager/t2651_3/A.scala
new file mode 100644
index 0000000000..14f9e4662f
--- /dev/null
+++ b/test/files/buildmanager/t2651_3/A.scala
@@ -0,0 +1,3 @@
+trait A[T, S] {
+ def x: T
+}
diff --git a/test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala b/test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala
new file mode 100644
index 0000000000..51bf27d1fa
--- /dev/null
+++ b/test/files/buildmanager/t2651_3/t2651_3.changes/A2.scala
@@ -0,0 +1,3 @@
+trait A[T, S] {
+ def x: S
+}
diff --git a/test/files/buildmanager/t2651_3/t2651_3.check b/test/files/buildmanager/t2651_3/t2651_3.check
new file mode 100644
index 0000000000..d4bac196e9
--- /dev/null
+++ b/test/files/buildmanager/t2651_3/t2651_3.check
@@ -0,0 +1,6 @@
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()S flags: <deferred> <method>]))
diff --git a/test/files/buildmanager/t2651_3/t2651_3.test b/test/files/buildmanager/t2651_3/t2651_3.test
new file mode 100644
index 0000000000..d0614473ce
--- /dev/null
+++ b/test/files/buildmanager/t2651_3/t2651_3.test
@@ -0,0 +1,3 @@
+>>compile A.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2651_4/A.scala b/test/files/buildmanager/t2651_4/A.scala
new file mode 100644
index 0000000000..63f2a1643e
--- /dev/null
+++ b/test/files/buildmanager/t2651_4/A.scala
@@ -0,0 +1,5 @@
+trait A[T, S] {
+ def x: T
+ def y(a: T)
+ def z[B <: T]
+}
diff --git a/test/files/buildmanager/t2651_4/B.scala b/test/files/buildmanager/t2651_4/B.scala
new file mode 100644
index 0000000000..b33dbde676
--- /dev/null
+++ b/test/files/buildmanager/t2651_4/B.scala
@@ -0,0 +1,3 @@
+trait B extends A[Int, String] {
+ def x = 3
+}
diff --git a/test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala b/test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala
new file mode 100644
index 0000000000..f155129d13
--- /dev/null
+++ b/test/files/buildmanager/t2651_4/t2651_4.changes/A2.scala
@@ -0,0 +1,5 @@
+trait A[S, T] {
+ def x: T
+ def y(a: T)
+ def z[B <: T]
+}
diff --git a/test/files/buildmanager/t2651_4/t2651_4.check b/test/files/buildmanager/t2651_4/t2651_4.check
new file mode 100644
index 0000000000..c4ce382b5f
--- /dev/null
+++ b/test/files/buildmanager/t2651_4/t2651_4.check
@@ -0,0 +1,13 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()T flags: <deferred> <method>], Changed(Definition(A.y))[method y changed from (a: T)Unit to (a: T)Unit flags: <deferred> <method>], Changed(Definition(A.z))[method z changed from [B <: T]()Unit to [B <: T]()Unit flags: <deferred> <method>]))
+invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from ()T to ()T flags: <deferred> <method>]]
+compiling Set(B.scala)
+B.scala:2: error: type mismatch;
+ found : Int(3)
+ required: String
+ def x = 3
+ ^
diff --git a/test/files/buildmanager/t2651_4/t2651_4.test b/test/files/buildmanager/t2651_4/t2651_4.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2651_4/t2651_4.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2652/A.scala b/test/files/buildmanager/t2652/A.scala
new file mode 100644
index 0000000000..a62506e890
--- /dev/null
+++ b/test/files/buildmanager/t2652/A.scala
@@ -0,0 +1,3 @@
+class A {
+ def x[T](t: T) = t
+}
diff --git a/test/files/buildmanager/t2652/B.scala b/test/files/buildmanager/t2652/B.scala
new file mode 100644
index 0000000000..86d08f0d3d
--- /dev/null
+++ b/test/files/buildmanager/t2652/B.scala
@@ -0,0 +1,4 @@
+object B {
+ val y = (new A).x(3)
+}
+
diff --git a/test/files/buildmanager/t2652/t2652.changes/A2.scala b/test/files/buildmanager/t2652/t2652.changes/A2.scala
new file mode 100644
index 0000000000..29135c0e94
--- /dev/null
+++ b/test/files/buildmanager/t2652/t2652.changes/A2.scala
@@ -0,0 +1,4 @@
+class A {
+ def x[@specialized T](t: T) = t
+}
+
diff --git a/test/files/buildmanager/t2652/t2652.check b/test/files/buildmanager/t2652/t2652.check
new file mode 100644
index 0000000000..77a27a727f
--- /dev/null
+++ b/test/files/buildmanager/t2652/t2652.check
@@ -0,0 +1,9 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(Changed(Definition(A.x))[method x changed from [T](t: T)T to [T](t: T)T flags: <method>]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from [T](t: T)T to [T](t: T)T flags: <method>]]
+compiling Set(B.scala)
+Changes: Map(object B -> List())
diff --git a/test/files/buildmanager/t2652/t2652.test b/test/files/buildmanager/t2652/t2652.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2652/t2652.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2653/A.scala b/test/files/buildmanager/t2653/A.scala
new file mode 100644
index 0000000000..fb17a158c7
--- /dev/null
+++ b/test/files/buildmanager/t2653/A.scala
@@ -0,0 +1,2 @@
+class A[+T]
+
diff --git a/test/files/buildmanager/t2653/B.scala b/test/files/buildmanager/t2653/B.scala
new file mode 100644
index 0000000000..8f55a88e05
--- /dev/null
+++ b/test/files/buildmanager/t2653/B.scala
@@ -0,0 +1,3 @@
+object B {
+ val a: A[Any] = new A[Int]
+}
diff --git a/test/files/buildmanager/t2653/t2653.changes/A2.scala b/test/files/buildmanager/t2653/t2653.changes/A2.scala
new file mode 100644
index 0000000000..51d13cce6e
--- /dev/null
+++ b/test/files/buildmanager/t2653/t2653.changes/A2.scala
@@ -0,0 +1,2 @@
+class A[T]
+
diff --git a/test/files/buildmanager/t2653/t2653.check b/test/files/buildmanager/t2653/t2653.check
new file mode 100644
index 0000000000..0d40601962
--- /dev/null
+++ b/test/files/buildmanager/t2653/t2653.check
@@ -0,0 +1,13 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(Changed(Class(A))[ tparams: List((type T,type T))], Changed(Definition(A.<init>))[constructor A changed from ()A[T] to ()A[T] flags: <method>]))
+invalidate B.scala because it references changed class [Changed(Class(A))[ tparams: List((type T,type T))]]
+compiling Set(B.scala)
+B.scala:2: error: type mismatch;
+ found : A[Int]
+ required: A[Any]
+ val a: A[Any] = new A[Int]
+ ^
diff --git a/test/files/buildmanager/t2653/t2653.test b/test/files/buildmanager/t2653/t2653.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2653/t2653.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2654/A.scala b/test/files/buildmanager/t2654/A.scala
new file mode 100644
index 0000000000..75f396d039
--- /dev/null
+++ b/test/files/buildmanager/t2654/A.scala
@@ -0,0 +1,2 @@
+class A
+
diff --git a/test/files/buildmanager/t2654/B.scala b/test/files/buildmanager/t2654/B.scala
new file mode 100644
index 0000000000..a18aec3dbe
--- /dev/null
+++ b/test/files/buildmanager/t2654/B.scala
@@ -0,0 +1 @@
+class B extends A
diff --git a/test/files/buildmanager/t2654/t2654.changes/A2.scala b/test/files/buildmanager/t2654/t2654.changes/A2.scala
new file mode 100644
index 0000000000..c302edbd85
--- /dev/null
+++ b/test/files/buildmanager/t2654/t2654.changes/A2.scala
@@ -0,0 +1,4 @@
+class A {
+ private def x = 5
+}
+
diff --git a/test/files/buildmanager/t2654/t2654.check b/test/files/buildmanager/t2654/t2654.check
new file mode 100644
index 0000000000..68f6e8efc0
--- /dev/null
+++ b/test/files/buildmanager/t2654/t2654.check
@@ -0,0 +1,6 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List())
diff --git a/test/files/buildmanager/t2654/t2654.test b/test/files/buildmanager/t2654/t2654.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2654/t2654.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2655/A.scala b/test/files/buildmanager/t2655/A.scala
new file mode 100644
index 0000000000..b2c54ac47d
--- /dev/null
+++ b/test/files/buildmanager/t2655/A.scala
@@ -0,0 +1,4 @@
+object A {
+ def x(i: => String) = ()
+}
+
diff --git a/test/files/buildmanager/t2655/B.scala b/test/files/buildmanager/t2655/B.scala
new file mode 100644
index 0000000000..6c1918c0fb
--- /dev/null
+++ b/test/files/buildmanager/t2655/B.scala
@@ -0,0 +1,3 @@
+object B {
+ val x = A.x("3")
+}
diff --git a/test/files/buildmanager/t2655/t2655.changes/A2.scala b/test/files/buildmanager/t2655/t2655.changes/A2.scala
new file mode 100644
index 0000000000..0d6a7c69bb
--- /dev/null
+++ b/test/files/buildmanager/t2655/t2655.changes/A2.scala
@@ -0,0 +1,4 @@
+object A {
+ def x(i: Function0[String]) = ()
+}
+
diff --git a/test/files/buildmanager/t2655/t2655.check b/test/files/buildmanager/t2655/t2655.check
new file mode 100644
index 0000000000..a4a071ed70
--- /dev/null
+++ b/test/files/buildmanager/t2655/t2655.check
@@ -0,0 +1,13 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(object A -> List(Changed(Definition(A.x))[method x changed from (i: Function0)Unit to (i: Function0)Unit flags: <method>]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from (i: Function0)Unit to (i: Function0)Unit flags: <method>]]
+compiling Set(B.scala)
+B.scala:2: error: type mismatch;
+ found : java.lang.String("3")
+ required: () => String
+ val x = A.x("3")
+ ^
diff --git a/test/files/buildmanager/t2655/t2655.test b/test/files/buildmanager/t2655/t2655.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2655/t2655.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2657/A.scala b/test/files/buildmanager/t2657/A.scala
new file mode 100644
index 0000000000..2a6c62d29c
--- /dev/null
+++ b/test/files/buildmanager/t2657/A.scala
@@ -0,0 +1,3 @@
+class A {
+ implicit def y(i: Int): String = i.toString
+}
diff --git a/test/files/buildmanager/t2657/B.scala b/test/files/buildmanager/t2657/B.scala
new file mode 100644
index 0000000000..77869890db
--- /dev/null
+++ b/test/files/buildmanager/t2657/B.scala
@@ -0,0 +1,4 @@
+object B extends A {
+ val x: String = 3
+}
+
diff --git a/test/files/buildmanager/t2657/t2657.changes/A2.scala b/test/files/buildmanager/t2657/t2657.changes/A2.scala
new file mode 100644
index 0000000000..7dc99d425e
--- /dev/null
+++ b/test/files/buildmanager/t2657/t2657.changes/A2.scala
@@ -0,0 +1,3 @@
+class A {
+ def y(i: Int): String = i.toString
+}
diff --git a/test/files/buildmanager/t2657/t2657.check b/test/files/buildmanager/t2657/t2657.check
new file mode 100644
index 0000000000..9713f66024
--- /dev/null
+++ b/test/files/buildmanager/t2657/t2657.check
@@ -0,0 +1,13 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(Changed(Definition(A.y))[method y changed from (i: Int)java.lang.String to (i: Int)java.lang.String flags: implicit <method>]))
+invalidate B.scala because inherited method changed [Changed(Definition(A.y))[method y changed from (i: Int)java.lang.String to (i: Int)java.lang.String flags: implicit <method>]]
+compiling Set(B.scala)
+B.scala:2: error: type mismatch;
+ found : Int(3)
+ required: String
+ val x: String = 3
+ ^
diff --git a/test/files/buildmanager/t2657/t2657.test b/test/files/buildmanager/t2657/t2657.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2657/t2657.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2789/A.scala b/test/files/buildmanager/t2789/A.scala
new file mode 100644
index 0000000000..08d5bc840c
--- /dev/null
+++ b/test/files/buildmanager/t2789/A.scala
@@ -0,0 +1,5 @@
+class A {
+ implicit def e: E = new E
+ def x(i: Int)(implicit y: E): String = ""
+}
+class E
diff --git a/test/files/buildmanager/t2789/B.scala b/test/files/buildmanager/t2789/B.scala
new file mode 100644
index 0000000000..dcefbeec1b
--- /dev/null
+++ b/test/files/buildmanager/t2789/B.scala
@@ -0,0 +1,3 @@
+object B extends A {
+ val y = x(3)
+}
diff --git a/test/files/buildmanager/t2789/t2789.changes/A2.scala b/test/files/buildmanager/t2789/t2789.changes/A2.scala
new file mode 100644
index 0000000000..4ba3814e71
--- /dev/null
+++ b/test/files/buildmanager/t2789/t2789.changes/A2.scala
@@ -0,0 +1,5 @@
+class A {
+ def e: E = new E
+ def x(i: Int)(implicit y: E): String = ""
+}
+class E
diff --git a/test/files/buildmanager/t2789/t2789.check b/test/files/buildmanager/t2789/t2789.check
new file mode 100644
index 0000000000..78c5119355
--- /dev/null
+++ b/test/files/buildmanager/t2789/t2789.check
@@ -0,0 +1,11 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(class A -> List(Changed(Definition(A.e))[method e changed from ()E to ()E flags: implicit <method>]), class E -> List())
+invalidate B.scala because inherited method changed [Changed(Definition(A.e))[method e changed from ()E to ()E flags: implicit <method>]]
+compiling Set(B.scala)
+B.scala:2: error: could not find implicit value for parameter y: E
+ val y = x(3)
+ ^
diff --git a/test/files/buildmanager/t2789/t2789.test b/test/files/buildmanager/t2789/t2789.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2789/t2789.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t2790/A.scala b/test/files/buildmanager/t2790/A.scala
new file mode 100644
index 0000000000..6e9c1a90db
--- /dev/null
+++ b/test/files/buildmanager/t2790/A.scala
@@ -0,0 +1,5 @@
+object A {
+ def x(f: String, g: Int): Int = g
+ def x(f: Int, g: Int = 3): Int = g
+}
+
diff --git a/test/files/buildmanager/t2790/B.scala b/test/files/buildmanager/t2790/B.scala
new file mode 100644
index 0000000000..441055ca12
--- /dev/null
+++ b/test/files/buildmanager/t2790/B.scala
@@ -0,0 +1,4 @@
+object B {
+ val y = A.x(5)
+}
+
diff --git a/test/files/buildmanager/t2790/t2790.changes/A2.scala b/test/files/buildmanager/t2790/t2790.changes/A2.scala
new file mode 100644
index 0000000000..704ef4e96e
--- /dev/null
+++ b/test/files/buildmanager/t2790/t2790.changes/A2.scala
@@ -0,0 +1,4 @@
+object A {
+ def x(f: String, g: Int = 3): Int = g
+ def x(f: Int, g: Int): Int = g
+}
diff --git a/test/files/buildmanager/t2790/t2790.check b/test/files/buildmanager/t2790/t2790.check
new file mode 100644
index 0000000000..065956765a
--- /dev/null
+++ b/test/files/buildmanager/t2790/t2790.check
@@ -0,0 +1,14 @@
+builder > A.scala B.scala
+compiling Set(A.scala, B.scala)
+Changes: Map()
+builder > A.scala
+compiling Set(A.scala)
+Changes: Map(object A -> List(Added(Definition(A.x)), Changed(Definition(A.x))[value x changed from (f: java.lang.String,g: Int)Int to (f: java.lang.String,g: Int)Int <and> (f: Int,g: Int)Int flags: <method>]))
+invalidate B.scala because it references changed definition [Changed(Definition(A.x))[value x changed from (f: java.lang.String,g: Int)Int to (f: java.lang.String,g: Int)Int <and> (f: Int,g: Int)Int flags: <method>]]
+compiling Set(B.scala)
+B.scala:2: error: type mismatch;
+ found : Int(5)
+ required: String
+Error occurred in an application involving default arguments.
+ val y = A.x(5)
+ ^
diff --git a/test/files/buildmanager/t2790/t2790.test b/test/files/buildmanager/t2790/t2790.test
new file mode 100644
index 0000000000..6f3bd03361
--- /dev/null
+++ b/test/files/buildmanager/t2790/t2790.test
@@ -0,0 +1,3 @@
+>>compile A.scala B.scala
+>>update A.scala=>A2.scala
+>>compile A.scala
diff --git a/test/files/buildmanager/t3045/A.java b/test/files/buildmanager/t3045/A.java
new file mode 100644
index 0000000000..d1acb00cd6
--- /dev/null
+++ b/test/files/buildmanager/t3045/A.java
@@ -0,0 +1,7 @@
+public interface A {
+ public class C implements A {}
+}
+
+class B {
+ static class C {}
+}
diff --git a/test/files/buildmanager/t3045/t3045.check b/test/files/buildmanager/t3045/t3045.check
new file mode 100644
index 0000000000..5e4e71e045
--- /dev/null
+++ b/test/files/buildmanager/t3045/t3045.check
@@ -0,0 +1,3 @@
+builder > A.java
+compiling Set(A.java)
+Changes: Map()
diff --git a/test/files/buildmanager/t3045/t3045.test b/test/files/buildmanager/t3045/t3045.test
new file mode 100644
index 0000000000..6cf7e35543
--- /dev/null
+++ b/test/files/buildmanager/t3045/t3045.test
@@ -0,0 +1 @@
+>>compile A.java
diff --git a/test/files/buildmanager/t3054/bar/Bar.java b/test/files/buildmanager/t3054/bar/Bar.java
new file mode 100644
index 0000000000..e1b056d4e5
--- /dev/null
+++ b/test/files/buildmanager/t3054/bar/Bar.java
@@ -0,0 +1,7 @@
+package bar;
+import foo.Foo$;
+
+
+public class Bar {
+ void bar() { Foo$.MODULE$.foo(); }
+}
diff --git a/test/files/buildmanager/t3054/foo/Foo.scala b/test/files/buildmanager/t3054/foo/Foo.scala
new file mode 100644
index 0000000000..c4838b9958
--- /dev/null
+++ b/test/files/buildmanager/t3054/foo/Foo.scala
@@ -0,0 +1,5 @@
+package foo
+
+class Foo {
+ def foo = println("foo")
+}
diff --git a/test/files/buildmanager/t3054/t3054.check b/test/files/buildmanager/t3054/t3054.check
new file mode 100644
index 0000000000..97cca8862e
--- /dev/null
+++ b/test/files/buildmanager/t3054/t3054.check
@@ -0,0 +1,3 @@
+builder > bar/Bar.java foo/Foo.scala
+compiling Set(bar/Bar.java, foo/Foo.scala)
+Changes: Map()
diff --git a/test/files/buildmanager/t3054/t3054.test b/test/files/buildmanager/t3054/t3054.test
new file mode 100644
index 0000000000..903df24b13
--- /dev/null
+++ b/test/files/buildmanager/t3054/t3054.test
@@ -0,0 +1 @@
+>>compile bar/Bar.java foo/Foo.scala
diff --git a/test/files/buildmanager/t3133/A.java b/test/files/buildmanager/t3133/A.java
new file mode 100644
index 0000000000..9cf4d96bd4
--- /dev/null
+++ b/test/files/buildmanager/t3133/A.java
@@ -0,0 +1,7 @@
+public class A {
+ class Foo {}
+
+ public A(Foo a) {}
+
+ private void bar(Foo z) {}
+}
diff --git a/test/files/buildmanager/t3133/t3133.check b/test/files/buildmanager/t3133/t3133.check
new file mode 100644
index 0000000000..5e4e71e045
--- /dev/null
+++ b/test/files/buildmanager/t3133/t3133.check
@@ -0,0 +1,3 @@
+builder > A.java
+compiling Set(A.java)
+Changes: Map()
diff --git a/test/files/buildmanager/t3133/t3133.test b/test/files/buildmanager/t3133/t3133.test
new file mode 100644
index 0000000000..6cf7e35543
--- /dev/null
+++ b/test/files/buildmanager/t3133/t3133.test
@@ -0,0 +1 @@
+>>compile A.java
diff --git a/test/files/files.iml b/test/files/files.iml
deleted file mode 100644
index ac78e33f39..0000000000
--- a/test/files/files.iml
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<module relativePaths="true" type="JAVA_MODULE" version="4">
- <component name="FacetManager">
- <facet type="Scala" name="Scala">
- <configuration />
- </facet>
- </component>
- <component name="NewModuleRootManager" inherit-compiler-output="true">
- <exclude-output />
- <content url="file://$MODULE_DIR$" />
- <orderEntry type="library" name="lib1" level="project" />
- <orderEntry type="inheritedJdk" />
- <orderEntry type="sourceFolder" forTests="false" />
- <orderEntry type="library" name="lib" level="project" />
- </component>
-</module>
-
diff --git a/test/files/jvm/actor-exceptions.check b/test/files/jvm/actor-exceptions.check
index 021ccea1f1..bd44b968cc 100644
--- a/test/files/jvm/actor-exceptions.check
+++ b/test/files/jvm/actor-exceptions.check
@@ -1,11 +1,3 @@
-problem
-received A
-problem
-received A
-problem
-received A
-problem
-received A
-problem
-received last A
-slave exited because of java.lang.Exception: unhandled
+Uncaught exception in Slave
+Message: A
+MyOtherException
diff --git a/test/files/jvm/actor-exceptions.scala b/test/files/jvm/actor-exceptions.scala
index ccec84c56c..77bf05c048 100644
--- a/test/files/jvm/actor-exceptions.scala
+++ b/test/files/jvm/actor-exceptions.scala
@@ -2,7 +2,13 @@
import scala.actors.{Actor, Exit}
import Actor._
-case class MyException(text: String) extends Exception
+case class MyException(text: String) extends Exception {
+ override def fillInStackTrace() = this
+}
+
+case class MyOtherException(text: String) extends Exception {
+ override def fillInStackTrace() = this
+}
object Master extends Actor {
trapExit = true
@@ -11,14 +17,15 @@ object Master extends Actor {
Slave.start()
for (i <- 0 until 10) Slave ! A
react {
- case Exit(from, reason) => println("slave exited because of "+reason)
+ case Exit(from, reason) =>
}
}
}
object Slave extends Actor {
+ override def toString = "Slave"
override def exceptionHandler: PartialFunction[Exception, Unit] = {
- case MyException(text) => println(text)
+ case MyException(text) =>
}
def act() {
var cnt = 0
@@ -27,11 +34,8 @@ object Slave extends Actor {
case A =>
cnt += 1
if (cnt % 2 != 0) throw MyException("problem")
- if (cnt < 10)
- println("received A")
- else {
- println("received last A")
- throw new Exception("unhandled")
+ if (cnt == 10) {
+ throw new MyOtherException("unhandled")
}
}
}
diff --git a/test/files/jvm/actor-executor.check b/test/files/jvm/actor-executor.check
new file mode 100644
index 0000000000..bdbdb5c6a2
--- /dev/null
+++ b/test/files/jvm/actor-executor.check
@@ -0,0 +1,20 @@
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
diff --git a/test/files/jvm/actor-executor.scala b/test/files/jvm/actor-executor.scala
new file mode 100644
index 0000000000..435c666fff
--- /dev/null
+++ b/test/files/jvm/actor-executor.scala
@@ -0,0 +1,65 @@
+import java.util.concurrent.Executors
+import scala.actors.{Actor, SchedulerAdapter}
+import Actor._
+
+trait AdaptedActor extends Actor {
+ override def scheduler =
+ Test.scheduler
+}
+
+object One extends AdaptedActor {
+ def act() {
+ Two.start()
+ var i = 0
+ loopWhile (i < 10000) {
+ i += 1
+ Two ! 'MsgForTwo
+ react {
+ case 'MsgForOne =>
+ if (i % 1000 == 0)
+ println("One: OK")
+ if (i == 10000)
+ Test.executor.shutdown()
+ }
+ }
+ }
+}
+
+object Two extends AdaptedActor {
+ def act() {
+ var i = 0
+ loopWhile (i < 10000) {
+ i += 1
+ react {
+ case 'MsgForTwo =>
+ if (i % 1000 == 0)
+ println("Two: OK")
+ One ! 'MsgForOne
+ }
+ }
+ }
+}
+
+object Test {
+ val executor =
+ Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors())
+
+ val scheduler =
+ new SchedulerAdapter {
+ def execute(block: => Unit) {
+ val task = new Runnable {
+ def run() { block }
+ }
+ try {
+ executor.execute(task)
+ } catch {
+ case ree: java.util.concurrent.RejectedExecutionException =>
+ task.run()
+ }
+ }
+ }
+
+ def main(args: Array[String]) {
+ One.start()
+ }
+}
diff --git a/test/files/jvm/actor-executor2.check b/test/files/jvm/actor-executor2.check
new file mode 100644
index 0000000000..da78f45836
--- /dev/null
+++ b/test/files/jvm/actor-executor2.check
@@ -0,0 +1,21 @@
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+One exited
diff --git a/test/files/jvm/actor-executor2.scala b/test/files/jvm/actor-executor2.scala
new file mode 100644
index 0000000000..9e99e167aa
--- /dev/null
+++ b/test/files/jvm/actor-executor2.scala
@@ -0,0 +1,67 @@
+import scala.actors.{Actor, SchedulerAdapter, Exit}
+import Actor._
+import java.util.concurrent.Executors
+
+object One extends AdaptedActor {
+ def act() {
+ Two.start()
+ var i = 0
+ loopWhile (i < Test.NUM_MSG) {
+ i += 1
+ Two ! 'MsgForTwo
+ react {
+ case 'MsgForOne =>
+ if (i % (Test.NUM_MSG/10) == 0)
+ println("One: OK")
+ }
+ }
+ }
+}
+
+object Two extends AdaptedActor {
+ def act() {
+ var i = 0
+ loopWhile (i < Test.NUM_MSG) {
+ i += 1
+ react {
+ case 'MsgForTwo =>
+ if (i % (Test.NUM_MSG/10) == 0)
+ println("Two: OK")
+ One ! 'MsgForOne
+ }
+ }
+ }
+}
+
+trait AdaptedActor extends Actor {
+ override def scheduler =
+ Test.scheduler
+}
+
+object Test {
+ val NUM_MSG = 100000
+
+ val executor =
+ Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors())
+
+ val scheduler =
+ new SchedulerAdapter {
+ def execute(block: => Unit) {
+ executor.execute(new Runnable {
+ def run() { block }
+ })
+ }
+ }
+
+ def main(args: Array[String]) {
+ self.trapExit = true
+ link(One)
+ One.start()
+
+ receive {
+ case Exit(from, reason) =>
+ println("One exited")
+ Test.executor.shutdown()
+ }
+ }
+}
diff --git a/test/files/jvm/actor-executor3.check b/test/files/jvm/actor-executor3.check
new file mode 100644
index 0000000000..bdbdb5c6a2
--- /dev/null
+++ b/test/files/jvm/actor-executor3.check
@@ -0,0 +1,20 @@
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
diff --git a/test/files/jvm/actor-executor3.scala b/test/files/jvm/actor-executor3.scala
new file mode 100644
index 0000000000..bf060b8ac5
--- /dev/null
+++ b/test/files/jvm/actor-executor3.scala
@@ -0,0 +1,52 @@
+import scala.actors.Actor
+import scala.actors.scheduler.ExecutorScheduler
+import java.util.concurrent.Executors
+
+object One extends AdaptedActor {
+ def act() {
+ Two.start()
+ var i = 0
+ loopWhile (i < Test.NUM_MSG) {
+ i += 1
+ Two ! 'MsgForTwo
+ react {
+ case 'MsgForOne =>
+ if (i % (Test.NUM_MSG/10) == 0)
+ println("One: OK")
+ }
+ }
+ }
+}
+
+object Two extends AdaptedActor {
+ def act() {
+ var i = 0
+ loopWhile (i < Test.NUM_MSG) {
+ i += 1
+ react {
+ case 'MsgForTwo =>
+ if (i % (Test.NUM_MSG/10) == 0)
+ println("Two: OK")
+ One ! 'MsgForOne
+ }
+ }
+ }
+}
+
+trait AdaptedActor extends Actor {
+ override def scheduler =
+ Test.scheduler
+}
+
+object Test {
+ val NUM_MSG = 100000
+
+ val executor =
+ Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors())
+
+ val scheduler = ExecutorScheduler(executor)
+
+ def main(args: Array[String]) {
+ One.start()
+ }
+}
diff --git a/test/files/jvm/actor-getstate.check b/test/files/jvm/actor-getstate.check
new file mode 100644
index 0000000000..2c94e48371
--- /dev/null
+++ b/test/files/jvm/actor-getstate.check
@@ -0,0 +1,2 @@
+OK
+OK
diff --git a/test/files/jvm/actor-getstate.scala b/test/files/jvm/actor-getstate.scala
new file mode 100644
index 0000000000..a6e15a8721
--- /dev/null
+++ b/test/files/jvm/actor-getstate.scala
@@ -0,0 +1,85 @@
+import scala.actors.{Reactor, Actor, TIMEOUT}
+import Actor._
+
+object Test {
+
+ def assert(cond: => Boolean, hint: String) {
+ if (!cond)
+ println("FAIL ["+hint+"]")
+ }
+
+ def expectActorState(a: Reactor[T] forSome { type T }, s: Actor.State.Value) {
+ var done = false
+ var i = 0
+ while (!done) {
+ i = i + 1
+ if (i == 10) { // only wait for 2 seconds total
+ println("FAIL ["+a+": expected "+s+"]")
+ done = true
+ }
+
+ Thread.sleep(200)
+ if (a.getState == s) // success
+ done = true
+ }
+ }
+
+ def main(args: Array[String]) {
+ actor {
+ val a = new Reactor[Any] {
+ def act() {
+ assert(getState == Actor.State.Runnable, "runnable1")
+ react {
+ case 'go =>
+ println("OK")
+ }
+ }
+ }
+ expectActorState(a, Actor.State.New)
+
+ a.start()
+ expectActorState(a, Actor.State.Suspended)
+
+ a ! 'go
+ expectActorState(a, Actor.State.Terminated)
+
+ val b = new Actor {
+ def act() {
+ assert(getState == Actor.State.Runnable, "runnable2: "+getState)
+ react {
+ case 'go =>
+ reactWithin(100000) {
+ case TIMEOUT =>
+ case 'go =>
+ receive {
+ case 'go =>
+ }
+ receiveWithin(100000) {
+ case TIMEOUT =>
+ case 'go =>
+ println("OK")
+ }
+ }
+ }
+ }
+ }
+ expectActorState(b, Actor.State.New)
+
+ b.start()
+ expectActorState(b, Actor.State.Suspended)
+
+ b ! 'go
+ expectActorState(b, Actor.State.TimedSuspended)
+
+ b ! 'go
+ expectActorState(b, Actor.State.Blocked)
+
+ b ! 'go
+ expectActorState(b, Actor.State.TimedBlocked)
+
+ b ! 'go
+ expectActorState(b, Actor.State.Terminated)
+ }
+ }
+
+}
diff --git a/test/files/jvm/actor-link-getstate.check b/test/files/jvm/actor-link-getstate.check
new file mode 100644
index 0000000000..45967222e6
--- /dev/null
+++ b/test/files/jvm/actor-link-getstate.check
@@ -0,0 +1,5 @@
+Done
+Uncaught exception in Master
+Message: 'done
+MyException: Master crashed
+Terminated
diff --git a/test/files/jvm/actor-link-getstate.scala b/test/files/jvm/actor-link-getstate.scala
new file mode 100644
index 0000000000..ec01757883
--- /dev/null
+++ b/test/files/jvm/actor-link-getstate.scala
@@ -0,0 +1,47 @@
+import scala.actors.{Actor, Exit}
+import scala.actors.Actor._
+
+case class MyException(text: String) extends Exception(text) {
+ override def fillInStackTrace() = this
+}
+
+object Slave extends Actor {
+ def act() {
+ loop {
+ react {
+ case 'doWork =>
+ Console.err.println("Done")
+ reply('done)
+ }
+ }
+ }
+}
+
+object Master extends Actor {
+ override def toString = "Master"
+ def act() {
+ link(Slave)
+ Slave ! 'doWork
+ react {
+ case 'done =>
+ throw new MyException("Master crashed")
+ }
+ }
+}
+
+object Test {
+
+ def main(args: Array[String]) {
+ actor {
+ self.trapExit = true
+ link(Slave)
+ Slave.start()
+ Master.start()
+ react {
+ case Exit(from, reason) if (from == Slave) =>
+ Console.err.println(Slave.getState)
+ }
+ }
+ }
+
+}
diff --git a/test/files/jvm/actor-receivewithin.scala b/test/files/jvm/actor-receivewithin.scala
index c6818cf211..a5c87c2722 100644
--- a/test/files/jvm/actor-receivewithin.scala
+++ b/test/files/jvm/actor-receivewithin.scala
@@ -29,6 +29,7 @@ object A extends Actor {
}
}
B ! 'next
+ receive { case 'done => }
cnt = 0
while (cnt < 501) {
cnt += 1
@@ -56,6 +57,7 @@ object B extends Actor {
for (_ <- 1 to 500) {
A ! 'msg2
}
+ A ! 'done
}
}
}
diff --git a/test/files/jvm/actor-uncaught-exception.check b/test/files/jvm/actor-uncaught-exception.check
new file mode 100644
index 0000000000..3e669779df
--- /dev/null
+++ b/test/files/jvm/actor-uncaught-exception.check
@@ -0,0 +1,5 @@
+Uncaught exception in StartError
+MyException: I don't want to run!
+Uncaught exception in MessageError
+Message: 'ping
+MyException: No message for me!
diff --git a/test/files/jvm/actor-uncaught-exception.scala b/test/files/jvm/actor-uncaught-exception.scala
new file mode 100644
index 0000000000..9f64be26e1
--- /dev/null
+++ b/test/files/jvm/actor-uncaught-exception.scala
@@ -0,0 +1,46 @@
+import scala.actors.{Actor, Exit}
+
+class MyException(msg: String) extends Exception(msg) {
+ override def fillInStackTrace() = this
+}
+
+object Test {
+
+ case object StartError extends Actor {
+ def act() {
+ throw new MyException("I don't want to run!")
+ }
+ }
+
+ case object MessageError extends Actor {
+ def act() {
+ react {
+ case _ => throw new MyException("No message for me!")
+ }
+ }
+ }
+
+ case object Supervisor extends Actor {
+ def act() {
+ trapExit = true
+ link(StartError)
+ link(MessageError)
+ StartError.start()
+ MessageError.start()
+
+ Actor.loop {
+ react {
+ case Exit(actor, reason) =>
+ if (actor == StartError)
+ MessageError ! 'ping
+ else
+ exit()
+ }
+ }
+ }
+ }
+
+ def main(args: Array[String]) {
+ Supervisor.start()
+ }
+}
diff --git a/test/files/jvm/deprecation.cmds b/test/files/jvm/deprecation.cmds
new file mode 100644
index 0000000000..4c0f73c58b
--- /dev/null
+++ b/test/files/jvm/deprecation.cmds
@@ -0,0 +1,3 @@
+javac Defs.java
+scalac Test_1.scala
+javac Use_2.java
diff --git a/test/files/jvm/future-alarm.check b/test/files/jvm/future-alarm.check
new file mode 100644
index 0000000000..01a87d1c4c
--- /dev/null
+++ b/test/files/jvm/future-alarm.check
@@ -0,0 +1,20 @@
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
+OK
diff --git a/test/files/jvm/future-alarm.scala b/test/files/jvm/future-alarm.scala
new file mode 100644
index 0000000000..0dda492741
--- /dev/null
+++ b/test/files/jvm/future-alarm.scala
@@ -0,0 +1,16 @@
+import scala.actors.Futures
+
+object Test {
+ def main(args: Array[String]) {
+ for (i <- 1 to 100000) {
+ Futures.alarm(0)
+ if (i % 10000 == 0)
+ println("OK")
+ }
+ for (_ <- 1 to 10) {
+ val ft = Futures.alarm(100)
+ ft()
+ println("OK")
+ }
+ }
+}
diff --git a/test/files/lib/genericNest.jar.desired.sha1 b/test/files/jvm/genericNest/genericNest.jar.desired.sha1
index e9321262f2..e9321262f2 100644
--- a/test/files/lib/genericNest.jar.desired.sha1
+++ b/test/files/jvm/genericNest/genericNest.jar.desired.sha1
diff --git a/test/files/jvm/genericNest.scala b/test/files/jvm/genericNest/genericNest.scala
index c1b0210117..c1b0210117 100644
--- a/test/files/jvm/genericNest.scala
+++ b/test/files/jvm/genericNest/genericNest.scala
diff --git a/test/files/jvm/inner.scala b/test/files/jvm/inner.scala
index 51e3909ef3..d0d2226de6 100644
--- a/test/files/jvm/inner.scala
+++ b/test/files/jvm/inner.scala
@@ -53,8 +53,8 @@ class A {
}
object Scalatest {
- private val outputdir = System.getProperty("scalatest.output", "inner-jvm.obj")
- private val scalalib = System.getProperty("scalatest.lib", "")
+ private val outputdir = System.getProperty("partest.output", "inner.obj")
+ private val scalalib = System.getProperty("partest.lib", "")
private val classpath = outputdir + File.pathSeparator + scalalib
private val javabin = {
val jhome = new File(System.getProperty("java.home"))
diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check
index 6c89318470..b74ba1be0a 100644
--- a/test/files/jvm/interpreter.check
+++ b/test/files/jvm/interpreter.check
@@ -19,7 +19,7 @@ scala> defined type alias anotherint
scala> four: anotherint = 4
-scala> <console>:5: error: type mismatch;
+scala> <console>:6: error: type mismatch;
found : java.lang.String("hello")
required: anotherint
val bogus: anotherint = "hello"
@@ -169,7 +169,7 @@ scala> res4: Array[_] = Array(2)
scala> res5: Array[java.lang.String] = Array(abc, abc)
-scala> res6: scala.collection.mutable.GenericArray[_] = GenericArray(1, 2)
+scala> res6: scala.collection.mutable.ArraySeq[_] = ArraySeq(1, 2)
scala> res7: Array[(_$1, _$1)] forSome { type _$1 } = Array((1,1), (2,2))
@@ -217,9 +217,9 @@ scala> defined class Exp
defined class Fact
defined class Term
-scala> | | <console>:15: warning: match is not exhaustive!
-missing combination Term
+scala> | | <console>:16: warning: match is not exhaustive!
missing combination Exp
+missing combination Term
def f(e: Exp) = e match { // non-exhaustive warning here
^
@@ -229,7 +229,7 @@ scala>
scala>
plusOne: (x: Int)Int
res0: Int = 6
-res0: java.lang.String = after reset
-<console>:5: error: not found: value plusOne
+res1: java.lang.String = after reset
+<console>:6: error: not found: value plusOne
plusOne(5) // should be undefined now
^
diff --git a/test/files/lib/annotations.jar.desired.sha1 b/test/files/jvm/lib/annotations.jar.desired.sha1
index 2b4292d796..2b4292d796 100644
--- a/test/files/lib/annotations.jar.desired.sha1
+++ b/test/files/jvm/lib/annotations.jar.desired.sha1
diff --git a/test/files/lib/nest.jar.desired.sha1 b/test/files/jvm/lib/nest.jar.desired.sha1
index 674ca79a5b..674ca79a5b 100644
--- a/test/files/lib/nest.jar.desired.sha1
+++ b/test/files/jvm/lib/nest.jar.desired.sha1
diff --git a/test/files/lib/methvsfield.jar.desired.sha1 b/test/files/jvm/methvsfield/methvsfield.jar.desired.sha1
index 8c01532b88..8c01532b88 100644
--- a/test/files/lib/methvsfield.jar.desired.sha1
+++ b/test/files/jvm/methvsfield/methvsfield.jar.desired.sha1
diff --git a/test/files/jvm/methvsfield.java b/test/files/jvm/methvsfield/methvsfield.java
index dadc98669a..dadc98669a 100644
--- a/test/files/jvm/methvsfield.java
+++ b/test/files/jvm/methvsfield/methvsfield.java
diff --git a/test/files/jvm/methvsfield.scala b/test/files/jvm/methvsfield/methvsfield.scala
index e9795ec6a8..e9795ec6a8 100644
--- a/test/files/jvm/methvsfield.scala
+++ b/test/files/jvm/methvsfield/methvsfield.scala
diff --git a/test/files/jvm/nest.java b/test/files/jvm/nest/nest.java
index 3f6f0bebbd..3f6f0bebbd 100644
--- a/test/files/jvm/nest.java
+++ b/test/files/jvm/nest/nest.java
diff --git a/test/files/jvm/nest.scala b/test/files/jvm/nest/nest.scala
index 3ab62484fa..3ab62484fa 100644
--- a/test/files/jvm/nest.scala
+++ b/test/files/jvm/nest/nest.scala
diff --git a/test/files/lib/enums.jar.desired.sha1 b/test/files/jvm/outerEnum/enums.jar.desired.sha1
index 46cd8e92cf..46cd8e92cf 100644
--- a/test/files/lib/enums.jar.desired.sha1
+++ b/test/files/jvm/outerEnum/enums.jar.desired.sha1
diff --git a/test/files/jvm/outerEnum.scala b/test/files/jvm/outerEnum/outerEnum.scala
index 18794b7dbe..18794b7dbe 100644
--- a/test/files/jvm/outerEnum.scala
+++ b/test/files/jvm/outerEnum/outerEnum.scala
diff --git a/test/files/jvm/reactor-exceptionOnSend.scala b/test/files/jvm/reactor-exceptionOnSend.scala
index 3684943b9b..c89aab334b 100644
--- a/test/files/jvm/reactor-exceptionOnSend.scala
+++ b/test/files/jvm/reactor-exceptionOnSend.scala
@@ -3,7 +3,7 @@ import scala.actors.Actor._
case class MyException(text: String) extends Exception(text)
-object A extends Reactor {
+object A extends Reactor[Any] {
override def exceptionHandler = {
case MyException(text) =>
println("receiver handles exception")
@@ -29,7 +29,7 @@ object A extends Reactor {
}
}
-object B extends Reactor {
+object B extends Reactor[Any] {
def act() {
A.start()
A ! 'hello
diff --git a/test/files/jvm/reactor-producer-consumer.scala b/test/files/jvm/reactor-producer-consumer.scala
index 946e1561ce..0d33043fc6 100644
--- a/test/files/jvm/reactor-producer-consumer.scala
+++ b/test/files/jvm/reactor-producer-consumer.scala
@@ -2,10 +2,10 @@ import scala.actors.Reactor
object Test {
case class Stop()
- case class Get(from: Reactor)
+ case class Get(from: Reactor[Any])
case class Put(x: Int)
- class UnboundedBuffer extends Reactor {
+ class UnboundedBuffer extends Reactor[Any] {
def act() {
react {
case Stop() =>
@@ -20,7 +20,7 @@ object Test {
}
}
- class Producer(buf: UnboundedBuffer, n: Int, delay: Long, parent: Reactor) extends Reactor {
+ class Producer(buf: UnboundedBuffer, n: Int, delay: Long, parent: Reactor[Any]) extends Reactor[Any] {
def act() {
var i = 0
while (i < n) {
@@ -32,7 +32,7 @@ object Test {
}
}
- class Consumer(buf: UnboundedBuffer, n: Int, delay: Long, parent: Reactor) extends Reactor {
+ class Consumer(buf: UnboundedBuffer, n: Int, delay: Long, parent: Reactor[Any]) extends Reactor[Any] {
val step = n / 10
var i = 0
def act() {
@@ -53,7 +53,7 @@ object Test {
}
def main(args: Array[String]) {
- val parent = new Reactor {
+ val parent = new Reactor[Any] {
def act() {
val buffer = new UnboundedBuffer
buffer.start()
diff --git a/test/files/jvm/reactor.scala b/test/files/jvm/reactor.scala
index 8985f90569..12d5c7c221 100644
--- a/test/files/jvm/reactor.scala
+++ b/test/files/jvm/reactor.scala
@@ -1,13 +1,12 @@
import scala.actors.Reactor
-import scala.actors.Actor._
-case class Ping(from: Reactor)
+case class Ping(from: Reactor[Any])
case object Pong
case object Stop
/**
- * Ping pong example for OutputChannelActor.
+ * Ping pong example for Reactor.
*
* @author Philipp Haller
*/
@@ -20,7 +19,7 @@ object Test {
}
}
-class PingActor(count: Int, pong: Reactor) extends Reactor {
+class PingActor(count: Int, pong: Reactor[Any]) extends Reactor[Any] {
def act() {
var pingsLeft = count - 1
pong ! Ping(this)
@@ -42,7 +41,7 @@ class PingActor(count: Int, pong: Reactor) extends Reactor {
}
}
-class PongActor extends Reactor {
+class PongActor extends Reactor[Any] {
def act() {
var pongCount = 0
loop {
diff --git a/test/files/jvm/replyablereactor.scala b/test/files/jvm/replyablereactor.scala
index 368d172e3f..896a0bf440 100644
--- a/test/files/jvm/replyablereactor.scala
+++ b/test/files/jvm/replyablereactor.scala
@@ -1,5 +1,4 @@
-import scala.actors._
-import scala.actors.Actor._
+import scala.actors.ReplyReactor
class MyActor extends ReplyReactor {
def act() {
@@ -19,7 +18,7 @@ object Test {
val a = new MyActor
a.start()
- val b = new Reactor {
+ val b = new ReplyReactor {
def act() {
react {
case r: MyActor =>
diff --git a/test/files/jvm/replyablereactor2.scala b/test/files/jvm/replyablereactor2.scala
index 22622274dd..57b7cfe201 100644
--- a/test/files/jvm/replyablereactor2.scala
+++ b/test/files/jvm/replyablereactor2.scala
@@ -19,7 +19,7 @@ object Test {
val a = new MyActor
a.start()
- val b = new Reactor {
+ val b = new Reactor[Any] {
def act() {
react {
case r: MyActor =>
diff --git a/test/files/jvm/replyablereactor3.scala b/test/files/jvm/replyablereactor3.scala
index 676ffe98e6..b33db811e2 100644
--- a/test/files/jvm/replyablereactor3.scala
+++ b/test/files/jvm/replyablereactor3.scala
@@ -19,7 +19,7 @@ object Test {
val a = new MyActor
a.start()
- val b = new Reactor {
+ val b = new Reactor[Any] {
def act() {
react {
case r: MyActor =>
diff --git a/test/files/jvm/replyablereactor4.scala b/test/files/jvm/replyablereactor4.scala
index d61fb64287..dc24f5e88d 100644
--- a/test/files/jvm/replyablereactor4.scala
+++ b/test/files/jvm/replyablereactor4.scala
@@ -19,7 +19,7 @@ object Test {
val a = new MyActor
a.start()
- val b = new Reactor {
+ val b = new Reactor[Any] {
def act() {
react {
case r: MyActor =>
diff --git a/test/files/jvm/replyreactor.scala b/test/files/jvm/replyreactor.scala
index 0f452dbc7b..fb915cf3f9 100644
--- a/test/files/jvm/replyreactor.scala
+++ b/test/files/jvm/replyreactor.scala
@@ -1,4 +1,4 @@
-import scala.actors.{Reactor, ReplyReactor}
+import scala.actors.ReplyReactor
object Test {
def main(args: Array[String]) {
@@ -12,10 +12,10 @@ object Test {
}
a.start()
- val b = new Reactor {
+ val b = new ReplyReactor {
def act() {
react {
- case r: Reactor =>
+ case r: ReplyReactor =>
r ! 'hello
react {
case any =>
diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check
index 2b0ad3888b..3f095cb51e 100644
--- a/test/files/jvm/serialization.check
+++ b/test/files/jvm/serialization.check
@@ -33,9 +33,9 @@ x10 eq y10: true, y10 eq x10: true
x10 equals y10: true, y10 equals x10: true
x9 eq x10: false, x10 eq x9: false
-x9 equals x10: true, x10 equals x9: true
+x9 equals x10: false, x10 equals x9: false
x9 eq y10: false, y10 eq x9: false
-x9 equals y10: true, y10 equals x9: true
+x9 equals y10: false, y10 equals x9: false
f1 = <na>
_f1 = <na>
@@ -77,12 +77,12 @@ x = BitSet(2, 3)
y = BitSet(2, 3)
x equals y: true, y equals x: true
-x = Map(2 -> B, 1 -> A, 3 -> C)
+x = Map(1 -> A, 2 -> B, 3 -> C)
y = Map(1 -> A, 2 -> B, 3 -> C)
x equals y: true, y equals x: true
x = Set(1, 2)
-y = Set(2, 1)
+y = Set(1, 2)
x equals y: true, y equals x: true
x = List((buffers,20), (layers,2), (title,3))
diff --git a/test/files/jvm/t1652.check b/test/files/jvm/t1652.check
deleted file mode 100644
index dfa480ce6e..0000000000
--- a/test/files/jvm/t1652.check
+++ /dev/null
@@ -1,2 +0,0 @@
-OK1
-OK2
diff --git a/test/files/jvm/t2470.cmds b/test/files/jvm/t2470.cmds
new file mode 100644
index 0000000000..b4ef0f4aeb
--- /dev/null
+++ b/test/files/jvm/t2470.cmds
@@ -0,0 +1,3 @@
+javac Action.java Task.java
+scalac Test_1.scala
+scalac Read_Classfile_2.scala
diff --git a/test/files/jvm/t2827.check b/test/files/jvm/t2827.check
new file mode 100644
index 0000000000..c318e01ae5
--- /dev/null
+++ b/test/files/jvm/t2827.check
@@ -0,0 +1,3 @@
+Larry
+Curly
+Moe
diff --git a/test/files/jvm/t2827.scala b/test/files/jvm/t2827.scala
new file mode 100644
index 0000000000..d89e68516b
--- /dev/null
+++ b/test/files/jvm/t2827.scala
@@ -0,0 +1,14 @@
+object Stooges extends Enumeration {
+ type Stooge = Value
+ val Larry, Curly, Moe = Value
+ def nextStooge(v:Stooges.Stooge):Stooges.Stooge =
+ Stooges((v.id+1) % Stooges.maxId)
+}
+
+object Test {
+ def main(args: Array[String]) {
+ println(Stooges.Larry)
+ println(Stooges.Curly)
+ println(Stooges.Moe)
+ }
+}
diff --git a/test/files/jvm/t3003.check b/test/files/jvm/t3003.check
new file mode 100644
index 0000000000..c69e389d13
--- /dev/null
+++ b/test/files/jvm/t3003.check
@@ -0,0 +1 @@
+List(List(@Annot(optionType=class java.lang.String)))
diff --git a/test/files/jvm/t3003.cmds b/test/files/jvm/t3003.cmds
new file mode 100644
index 0000000000..c00396627c
--- /dev/null
+++ b/test/files/jvm/t3003.cmds
@@ -0,0 +1,2 @@
+javac Annot.java
+scalac Test_1.scala
diff --git a/test/files/jvm/t3003/Annot.java b/test/files/jvm/t3003/Annot.java
new file mode 100644
index 0000000000..1d5f206fd7
--- /dev/null
+++ b/test/files/jvm/t3003/Annot.java
@@ -0,0 +1,4 @@
+@java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.RUNTIME)
+public @interface Annot {
+ Class<?> optionType();
+}
diff --git a/test/files/jvm/t3003/Test_1.scala b/test/files/jvm/t3003/Test_1.scala
new file mode 100644
index 0000000000..ec7f220c94
--- /dev/null
+++ b/test/files/jvm/t3003/Test_1.scala
@@ -0,0 +1,8 @@
+class C {
+ @Annot(optionType=classOf[String]) val k = 0
+}
+object Test {
+ def main(args: Array[String]) {
+ println(classOf[C].getDeclaredFields.toList.sortBy(f => f.getName).map(f => f.getAnnotations.toList))
+ }
+}
diff --git a/test/files/jvm/t3102.check b/test/files/jvm/t3102.check
new file mode 100644
index 0000000000..d705e0b20e
--- /dev/null
+++ b/test/files/jvm/t3102.check
@@ -0,0 +1,2 @@
+42
+OK
diff --git a/test/files/jvm/t3102.scala b/test/files/jvm/t3102.scala
new file mode 100644
index 0000000000..ea3e720eca
--- /dev/null
+++ b/test/files/jvm/t3102.scala
@@ -0,0 +1,26 @@
+import scala.actors.{Actor, TIMEOUT}
+import Actor._
+
+object Test {
+ def main(args: Array[String]) {
+ val a = actor {
+ react {
+ case 'hello =>
+ reply(42)
+ }
+ }
+
+ val b = actor {
+ self.trapExit = true
+ val ft = a !! 'hello
+ println(ft())
+ // no message should be left over in mailbox
+ reactWithin(0) {
+ case TIMEOUT =>
+ println("OK")
+ case any =>
+ println(any)
+ }
+ }
+ }
+}
diff --git a/test/files/jvm/unittest_io.scala b/test/files/jvm/unittest_io.scala
index 0cf1165ec8..80d33d8433 100644
--- a/test/files/jvm/unittest_io.scala
+++ b/test/files/jvm/unittest_io.scala
@@ -16,7 +16,7 @@ it is split on several lines.
isn't it?
""")
def runTest() = assertEquals("wrong number of lines",src.getLines("\n").toList.length,5) // five new lines in there
- //for(val line <- src.getLines) {
+ //for (line <- src.getLines) {
// Console.print(line)
//}
}
diff --git a/test/files/jvm/xml01.scala b/test/files/jvm/xml01.scala
index e305f516d7..56e1c4ef96 100644
--- a/test/files/jvm/xml01.scala
+++ b/test/files/jvm/xml01.scala
@@ -25,20 +25,18 @@ object Test extends Application with Assert {
override def text = ""
}
- assertSameElements(List(3), List(3))
-
println("equality")
- assertEquals(c, parsedxml11)
- assertEquals(parsedxml1, parsedxml11)
- assertSameElements(List(parsedxml1), List(parsedxml11))
- assertSameElements(Array(parsedxml1).toList, List(parsedxml11))
+ assertEqualsXML(c, parsedxml11)
+ assertEqualsXML(parsedxml1, parsedxml11)
+ assertSameElementsXML(List(parsedxml1), List(parsedxml11))
+ assertSameElementsXML(Array(parsedxml1).toList, List(parsedxml11))
val x2 = "<book><author>Peter Buneman</author><author>Dan Suciu</author><title>Data on ze web</title></book>";
val i = new InputSource(new StringReader(x2))
val x2p = XML.load(i)
- assertEquals(x2p, Elem(null, "book" , e, sc,
+ assertEqualsXML(x2p, Elem(null, "book" , e, sc,
Elem(null, "author", e, sc,Text("Peter Buneman")),
Elem(null, "author", e, sc,Text("Dan Suciu")),
Elem(null, "title" , e, sc,Text("Data on ze web"))));
@@ -51,9 +49,9 @@ object Test extends Application with Assert {
println("xpath \\")
- assertSameElements(parsedxml1 \ "_" , List(Elem(null,"world", e, sc)))
+ assertSameElementsXML(parsedxml1 \ "_" , List(Elem(null,"world", e, sc)))
- assertSameElements(parsedxml1 \ "world", List(Elem(null,"world", e, sc)))
+ assertSameElementsXML(parsedxml1 \ "world", List(Elem(null,"world", e, sc)))
/*
Console.println( parsedxml2 \ "_" );
@@ -63,7 +61,7 @@ object Test extends Application with Assert {
};
*/
- assertSameElements(
+ assertSameElementsXML(
parsedxml2 \ "_" ,
List(
@@ -77,7 +75,7 @@ object Test extends Application with Assert {
);
assertEquals( (parsedxml2 \ "author").length, 0 );
- assertSameElements(
+ assertSameElementsXML(
parsedxml2 \ "book",
List(
@@ -91,7 +89,7 @@ object Test extends Application with Assert {
)
);
- assertSameElements(
+ assertSameElementsXML(
parsedxml2 \ "_" \ "_",
@@ -104,7 +102,7 @@ object Test extends Application with Assert {
)
);
- assertSameElements(
+ assertSameElementsXML(
parsedxml2 \ "_" \ "author",
@@ -116,11 +114,11 @@ object Test extends Application with Assert {
);
- assertSameElements( (parsedxml2 \ "_" \ "_" \ "author"), List() );
+ assertSameElementsXML( (parsedxml2 \ "_" \ "_" \ "author"), List() );
Console.println("xpath \\\\ DESCENDANTS");
- assertSameElements(
+ assertSameElementsXML(
parsedxml2 \\ "author",
@@ -133,7 +131,7 @@ object Test extends Application with Assert {
);
- assertSameElements(
+ assertSameElementsXML(
parsedxml2 \\ "title",
@@ -144,10 +142,10 @@ object Test extends Application with Assert {
println(
- (parsedxml2 \\ "book" ){ n:Node => n \ "title" == "Data on ze web" }
+ (parsedxml2 \\ "book" ){ n:Node => (n \ "title") xml_== "Data on ze web" }
);
- assertEquals(
+ assertEqualsXML(
(new NodeSeq { val theSeq = List( parsedxml2 ) }) \\ "_",
@@ -191,13 +189,13 @@ object Test extends Application with Assert {
val zz1 = <xml:group><a/><b/><c/></xml:group>
- assertTrue(zx1 == zz1)
+ assertTrue(zx1 xml_== zz1)
assertTrue(zz1.length == 3)
// unparsed
- val uup = <xml:unparsed>&<<>""^%@$!#</xml:unparsed>
- assertTrue(uup == "&<<>\"\"^%@$!#")
+ // val uup = <xml:unparsed>&<<>""^%@$!#</xml:unparsed>
+ // assertTrue(uup == "&<<>\"\"^%@$!#")
// test unicode escapes backslash u
println("attribute value normalization")
diff --git a/test/files/jvm/xml02.scala b/test/files/jvm/xml02.scala
index 0cbeb27ce2..11f77cc90f 100644
--- a/test/files/jvm/xml02.scala
+++ b/test/files/jvm/xml02.scala
@@ -18,10 +18,10 @@ object Test extends TestConsoleMain {
object XmlEx extends TestCase("attributes") with Assert {
override def runTest = {
- assertTrue("@one", ax \ "@foo" == "bar") // uses NodeSeq.view!
- assertTrue("@two", ax \ "@foo" == xml.Text("bar")) // dto.
- assertTrue("@three", bx \ "@foo" == "bar&x") // dto.
- assertTrue ("@four", (bx \ "@foo") sameElements List(xml.Text("bar&x")))
+ assertTrue("@one", (ax \ "@foo") xml_== "bar") // uses NodeSeq.view!
+ assertTrue("@two", (ax \ "@foo") xml_== xml.Text("bar")) // dto.
+ assertTrue("@three", (bx \ "@foo") xml_== "bar&x") // dto.
+ assertTrue ("@four", (bx \ "@foo") xml_sameElements List(xml.Text("bar&x")))
assertEquals("@five", "<hello foo=\"bar&amp;x\"></hello>", bx.toString)
}
}
@@ -29,8 +29,8 @@ object Test extends TestConsoleMain {
object XmlEy extends TestCase("attributes with namespace") with Assert {
override def runTest = {
val z = ax \ "@{the namespace from outer space}foo"
- assertTrue("@six", ax \ "@{the namespace from outer space}foo" == "baz")
- assertTrue("@eight", cx \ "@{the namespace from outer space}foo" == "baz")
+ assertTrue("@six", (ax \ "@{the namespace from outer space}foo") xml_== "baz")
+ assertTrue("@eight", (cx \ "@{the namespace from outer space}foo") xml_== "baz")
try {
ax \ "@"
@@ -58,8 +58,8 @@ object Test extends TestConsoleMain {
override def runTest = {
assertTrue(<hello/> match { case <hello/> => true; case _ => false; })
assertTrue(<x:ga xmlns:x="z"/> match { case <x:ga/> => true; case _ => false; });
- assertTrue(Utility.trim(cx) match { case n @ <hello>crazy text world</hello> if n \ "@foo" == "bar" => true; })
- assertTrue(Utility.trim(cx) match { case n @ <z:hello>crazy text world</z:hello> if n \ "@foo" == "bar" => true; })
+ assertTrue(Utility.trim(cx) match { case n @ <hello>crazy text world</hello> if (n \ "@foo") xml_== "bar" => true; })
+ assertTrue(Utility.trim(cx) match { case n @ <z:hello>crazy text world</z:hello> if (n \ "@foo") xml_== "bar" => true; })
}
}
diff --git a/test/files/jvm/xmlstuff.scala b/test/files/jvm/xmlstuff.scala
index 46faf283dc..6e711a0f86 100644
--- a/test/files/jvm/xmlstuff.scala
+++ b/test/files/jvm/xmlstuff.scala
@@ -64,9 +64,9 @@ passed ok
Text(x.attributes("value").toString + y.attributes("bazValue").toString+ "!")
};
val pelems_2 = new NodeSeq { val theSeq = List(Text("38!"),Text("58!")) };
- assertSameElements(pelems_1, pelems_2)
+ assertSameElementsXML(pelems_1, pelems_2)
- assertEquals(p \\ "@bazValue", Text("8"))
+ assertEqualsXML(p \\ "@bazValue", Text("8"))
val books =
<bks>
@@ -97,7 +97,7 @@ passed ok
println( new scala.xml.PrettyPrinter(80, 5).formatNodes (
for (t <- books \\ "title";
r <- reviews \\ "entry"
- if r \ "title" == t) yield
+ if (r \ "title") xml_== t) yield
<result>
{ t }
{ r \ "remarks" }
@@ -139,7 +139,7 @@ val addrBook =
println( new scala.xml.PrettyPrinter(80, 5).formatNodes (
for (t <- addrBook \\ "entry";
r <- phoneBook \\ "entry"
- if t \ "name" == r \ "name") yield
+ if (t \ "name") xml_== (r \ "name")) yield
<result>
{ t.child }
{ r \ "phone" }
diff --git a/test/files/neg/bug1279a.check b/test/files/neg/bug1279a.check
deleted file mode 100644
index edfd1fe871..0000000000
--- a/test/files/neg/bug1279a.check
+++ /dev/null
@@ -1,6 +0,0 @@
-bug1279a.scala:34: error: type mismatch;
- found : first.selfType
- required: M{type T <: this.T}
- def all4Impl[U](first: M {type T <: U}): Stream[M {type T <: U}] = Stream.cons(first, all4Impl(first.next))
- ^
-one error found
diff --git a/test/files/neg/bug1878.check b/test/files/neg/bug1878.check
index 5484d675af..f760781fa0 100644
--- a/test/files/neg/bug1878.check
+++ b/test/files/neg/bug1878.check
@@ -1,10 +1,15 @@
bug1878.scala:3: error: _* may only come last
val err1 = "" match { case Seq(f @ _*, ',') => f }
^
+bug1878.scala:3: error: scrutinee is incompatible with pattern type;
+ found : Seq[A]
+ required: java.lang.String
+ val err1 = "" match { case Seq(f @ _*, ',') => f }
+ ^
bug1878.scala:9: error: _* may only come last
val List(List(_*, arg2), _) = List(List(1,2,3), List(4,5,6))
^
bug1878.scala:13: error: _* may only come last
case <p> { _* } </p> =>
^
-three errors found
+four errors found
diff --git a/test/files/neg/bug2148.check b/test/files/neg/bug2148.check
new file mode 100644
index 0000000000..22be424c39
--- /dev/null
+++ b/test/files/neg/bug2148.check
@@ -0,0 +1,4 @@
+bug2148.scala:9: error: type A is not a stable prefix
+ val b = new A with A#A1
+ ^
+one error found
diff --git a/test/files/neg/bug2148.scala b/test/files/neg/bug2148.scala
new file mode 100644
index 0000000000..25788be84a
--- /dev/null
+++ b/test/files/neg/bug2148.scala
@@ -0,0 +1,10 @@
+class A {
+ var i = 0
+ trait A1 extends A {
+ i += 1
+ }
+}
+
+object Bob {
+ val b = new A with A#A1
+} \ No newline at end of file
diff --git a/test/files/neg/bug3123.check b/test/files/neg/bug3123.check
new file mode 100644
index 0000000000..8f5319c9a3
--- /dev/null
+++ b/test/files/neg/bug3123.check
@@ -0,0 +1,4 @@
+bug3123.scala:3: error: object Int is not a value
+ t match { case Int => true }
+ ^
+one error found
diff --git a/test/files/neg/bug3123.scala b/test/files/neg/bug3123.scala
new file mode 100644
index 0000000000..667a1da918
--- /dev/null
+++ b/test/files/neg/bug3123.scala
@@ -0,0 +1,5 @@
+object NotAValue {
+ def test[T](t : T) {
+ t match { case Int => true }
+ }
+}
diff --git a/test/files/neg/bug414.check b/test/files/neg/bug414.check
index c0f039ad26..ec23e26337 100644
--- a/test/files/neg/bug414.check
+++ b/test/files/neg/bug414.check
@@ -1,7 +1,3 @@
-bug414.scala:1: warning: case classes without a parameter list have been deprecated;
-use either case objects or case classes with `()' as parameter list.
-case class Empty[a] extends IntMap[a];
- ^
bug414.scala:5: error: pattern type is incompatible with expected type;
found : object Empty
required: IntMap[a]
@@ -12,5 +8,4 @@ bug414.scala:7: error: type mismatch;
required: a
case _ =>
^
-one warning found
two errors found
diff --git a/test/files/neg/bug961.check b/test/files/neg/bug961.check
index 8b407d1d0c..439ed98675 100644
--- a/test/files/neg/bug961.check
+++ b/test/files/neg/bug961.check
@@ -1,9 +1,4 @@
-bug961.scala:4: warning: case classes without a parameter list have been deprecated;
-use either case objects or case classes with `()' as parameter list.
- private case class B_inner extends A
- ^
bug961.scala:11: error: Temp.this.B of type object Temp.B does not take parameters
B() match {
^
-one warning found
one error found
diff --git a/test/files/neg/bug961.scala b/test/files/neg/bug961.scala
index 15309b96b4..088bddd7ee 100644
--- a/test/files/neg/bug961.scala
+++ b/test/files/neg/bug961.scala
@@ -1,7 +1,7 @@
-object Temp{
+object Temp {
abstract class A
- object B{
- private case class B_inner extends A
+ object B {
+ private case class B_inner() extends A
def apply: A = B_inner()
def unapply(a: A) = a match {
case B_inner() => true
diff --git a/test/files/neg/migration28.check b/test/files/neg/migration28.check
new file mode 100644
index 0000000000..9e042a0f0b
--- /dev/null
+++ b/test/files/neg/migration28.check
@@ -0,0 +1,9 @@
+migration28.scala:5: error: method ++= in class Stack has changed semantics:
+Stack ++= now pushes arguments on the stack from left to right.
+ s ++= List(1,2,3)
+ ^
+migration28.scala:7: error: method foreach in class Stack has changed semantics:
+Stack iterator and foreach now traverse in FIFO order.
+ s foreach (_ => ())
+ ^
+two errors found
diff --git a/test/files/neg/migration28.flags b/test/files/neg/migration28.flags
new file mode 100644
index 0000000000..f7025d0226
--- /dev/null
+++ b/test/files/neg/migration28.flags
@@ -0,0 +1 @@
+-Yfatal-warnings -Xmigration
diff --git a/test/files/neg/migration28.scala b/test/files/neg/migration28.scala
new file mode 100644
index 0000000000..090b32d690
--- /dev/null
+++ b/test/files/neg/migration28.scala
@@ -0,0 +1,12 @@
+object Test {
+ import scala.collection.mutable._
+
+ val s = new Stack[Int]
+ s ++= List(1,2,3)
+ s map (_ + 1)
+ s foreach (_ => ())
+
+ def main(args: Array[String]): Unit = {
+
+ }
+}
diff --git a/test/files/neg/multi-array.flags b/test/files/neg/multi-array.flags
new file mode 100644
index 0000000000..c36e713ab8
--- /dev/null
+++ b/test/files/neg/multi-array.flags
@@ -0,0 +1 @@
+-deprecation \ No newline at end of file
diff --git a/test/files/neg/patmat-type-check.check b/test/files/neg/patmat-type-check.check
new file mode 100644
index 0000000000..ab638b616d
--- /dev/null
+++ b/test/files/neg/patmat-type-check.check
@@ -0,0 +1,21 @@
+patmat-type-check.scala:18: error: scrutinee is incompatible with pattern type;
+ found : Seq[A]
+ required: java.lang.String
+ def f1 = "bob".reverse match { case Seq('b', 'o', 'b') => true } // fail
+ ^
+patmat-type-check.scala:19: error: scrutinee is incompatible with pattern type;
+ found : Seq[A]
+ required: Array[Char]
+ def f2 = "bob".toArray match { case Seq('b', 'o', 'b') => true } // fail
+ ^
+patmat-type-check.scala:23: error: scrutinee is incompatible with pattern type;
+ found : Seq[A]
+ required: Test.Bop2
+ def f3(x: Bop2) = x match { case Seq('b', 'o', 'b') => true } // fail
+ ^
+patmat-type-check.scala:27: error: scrutinee is incompatible with pattern type;
+ found : Seq[A]
+ required: Test.Bop3[T]
+ def f4[T](x: Bop3[T]) = x match { case Seq('b', 'o', 'b') => true } // fail
+ ^
+four errors found
diff --git a/test/files/neg/patmat-type-check.scala b/test/files/neg/patmat-type-check.scala
new file mode 100644
index 0000000000..c6c689b256
--- /dev/null
+++ b/test/files/neg/patmat-type-check.scala
@@ -0,0 +1,28 @@
+object Test
+{
+ def s1 = "bob".toList match { case Seq('b', 'o', 'b') => true } // list ok
+
+ // not final, allowed
+ class Bop
+ def s2(x: Bop) = x match { case Seq('b', 'o', 'b') => true }
+
+ // covariance, allowed
+ final class Bop4[+T]
+ def s3[T](x: Bop4[T]) = x match { case Seq('b', 'o', 'b') => true }
+
+ // contravariance, allowed
+ final class Bop5[T, U, -V]
+ def s4[T1, T2](x: Bop5[_, T1, T2]) = x match { case Seq('b', 'o', 'b') => true }
+
+ // String and Array are final/invariant, disallowed
+ def f1 = "bob".reverse match { case Seq('b', 'o', 'b') => true } // fail
+ def f2 = "bob".toArray match { case Seq('b', 'o', 'b') => true } // fail
+
+ // final, no type parameters, should be disallowed
+ final class Bop2
+ def f3(x: Bop2) = x match { case Seq('b', 'o', 'b') => true } // fail
+
+ // final, invariant type parameter, should be disallowed
+ final class Bop3[T]
+ def f4[T](x: Bop3[T]) = x match { case Seq('b', 'o', 'b') => true } // fail
+}
diff --git a/test/files/neg/patmatexhaust.check b/test/files/neg/patmatexhaust.check
index 1c46b6c9e5..ca769300c0 100644
--- a/test/files/neg/patmatexhaust.check
+++ b/test/files/neg/patmatexhaust.check
@@ -15,8 +15,8 @@ missing combination Qult Qult
def ma3(x:Mult) = (x,x) match { // not exhaustive
^
patmatexhaust.scala:49: warning: match is not exhaustive!
-missing combination Gu
missing combination Gp
+missing combination Gu
def ma4(x:Deep) = x match { // missing cases: Gu, Gp
^
diff --git a/test/files/neg/t0528neg.scala b/test/files/neg/t0528neg.scala
index 911745b763..30d20c95b1 100644
--- a/test/files/neg/t0528neg.scala
+++ b/test/files/neg/t0528neg.scala
@@ -3,7 +3,7 @@ trait Sequ[+A] {
}
class RichStr extends Sequ[Char] {
- // override to a primitve array
+ // override to a primitive array
def toArray: Array[Char] = new Array[Char](10)
}
diff --git a/test/files/neg/t0851.check b/test/files/neg/t0851.check
deleted file mode 100644
index 61d2a98632..0000000000
--- a/test/files/neg/t0851.check
+++ /dev/null
@@ -1,9 +0,0 @@
-t0851.scala:14: error: not enough arguments for method apply: (v1: Int,v2: String)java.lang.String in trait Function2.
-Unspecified value parameter v2.
- println(f(1))
- ^
-t0851.scala:22: error: not enough arguments for method apply: (v1: Int,v2: String)java.lang.String in trait Function2.
-Unspecified value parameter v2.
- println(fn(1))
- ^
-two errors found
diff --git a/test/files/neg/t0851.scala b/test/files/neg/t0851.scala
deleted file mode 100644
index b28be2c697..0000000000
--- a/test/files/neg/t0851.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-package test
-
-// This gives now type errors about missing parameters, which seems OK to me.
-// The tests just make sure it does not crash
-
-object test1 {
- case class Foo[T,T2](f : (T,T2) => String) extends (((T,T2)) => String){
- def apply(t : T) = (s:T2) => f(t,s)
- def apply(p : (T,T2)) = f(p._1,p._2)
- }
- implicit def g[T](f : (T,String) => String) = Foo(f)
- def main(args : Array[String]) : Unit = {
- val f = (x:Int,s:String) => s + x
- println(f(1))
- ()
- }
-}
-object Main {
- def main(args : Array[String]) {
- val fn = (a : Int, str : String) => "a: " + a + ", str: " + str
- implicit def fx[T](f : (T,String) => String) = (x:T) => f(x,null)
- println(fn(1))
- ()
- }
-}
diff --git a/test/files/neg/t2179.check b/test/files/neg/t2179.check
index e454e117b5..aa94fabe1f 100644
--- a/test/files/neg/t2179.check
+++ b/test/files/neg/t2179.check
@@ -1,9 +1,9 @@
-t2179.scala:2: error: inferred type arguments [scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]{def sameElements[B >: Any](that: Iterable[B]): Boolean}]; def reverse: scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; protected def thisCollection: Seq[Double]{def companion: scala.collection.generic.GenericCompanion[Seq[Any]]}; def dropRight(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def takeRight(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def slice(start: Int,end: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def take(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def drop(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}}] do not conform to method reduceLeft's type parameter bounds [B >: List[Double]]
+t2179.scala:2: error: inferred type arguments [scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]; protected def thisCollection: Seq[Double]{def companion: scala.collection.generic.GenericCompanion[Seq[Any]]}}] do not conform to method reduceLeft's type parameter bounds [B >: List[Double]]
(Nil:List[List[Double]]).reduceLeft((_: Any, _: Any) => Nil.indices.map(_ => 0d))
^
t2179.scala:2: error: type mismatch;
found : (Any, Any) => scala.collection.immutable.IndexedSeq[Double]
- required: (scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]{def sameElements[B >: Any](that: Iterable[B]): Boolean}]; def reverse: scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; protected def thisCollection: Seq[Double]{def companion: scala.collection.generic.GenericCompanion[Seq[Any]]}; def dropRight(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def takeRight(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def slice(start: Int,end: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def take(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def drop(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}}, List[Double]) => scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]{def sameElements[B >: Any](that: Iterable[B]): Boolean}]; def reverse: scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; protected def thisCollection: Seq[Double]{def companion: scala.collection.generic.GenericCompanion[Seq[Any]]}; def dropRight(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def takeRight(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def slice(start: Int,end: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def take(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def drop(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}}
+ required: (scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]; protected def thisCollection: Seq[Double]{def companion: scala.collection.generic.GenericCompanion[Seq[Any]]}}, List[Double]) => scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]; protected def thisCollection: Seq[Double]{def companion: scala.collection.generic.GenericCompanion[Seq[Any]]}}
(Nil:List[List[Double]]).reduceLeft((_: Any, _: Any) => Nil.indices.map(_ => 0d))
^
two errors found
diff --git a/test/files/neg/t2386.check b/test/files/neg/t2386.check
new file mode 100644
index 0000000000..2caa46c731
--- /dev/null
+++ b/test/files/neg/t2386.check
@@ -0,0 +1,4 @@
+t2386.scala:2: error: could not find implicit value for evidence parameter of type scala.reflect.ClassManifest[Array[_ >: java.lang.String with Int]]
+ val a = Array(Array(1, 2), Array("a","b"))
+ ^
+one error found
diff --git a/test/files/neg/t2386.scala b/test/files/neg/t2386.scala
new file mode 100644
index 0000000000..56146cc5c3
--- /dev/null
+++ b/test/files/neg/t2386.scala
@@ -0,0 +1,3 @@
+object Test {
+ val a = Array(Array(1, 2), Array("a","b"))
+}
diff --git a/test/files/neg/t2918.check b/test/files/neg/t2918.check
new file mode 100644
index 0000000000..e67f24ec57
--- /dev/null
+++ b/test/files/neg/t2918.check
@@ -0,0 +1,7 @@
+t2918.scala:2: error: cyclic aliasing or subtyping involving type A
+ def g[X, A[X] <: A[X]](x: A[X]) = x
+ ^
+t2918.scala:2: error: A does not take type parameters
+ def g[X, A[X] <: A[X]](x: A[X]) = x
+ ^
+two errors found
diff --git a/test/files/neg/t2918.scala b/test/files/neg/t2918.scala
new file mode 100755
index 0000000000..ff2be39ae0
--- /dev/null
+++ b/test/files/neg/t2918.scala
@@ -0,0 +1,3 @@
+object Test {
+ def g[X, A[X] <: A[X]](x: A[X]) = x
+}
diff --git a/test/files/neg/t3006.check b/test/files/neg/t3006.check
new file mode 100644
index 0000000000..9a90d32b28
--- /dev/null
+++ b/test/files/neg/t3006.check
@@ -0,0 +1,6 @@
+t3006.scala:8: error: type mismatch;
+ found : java.lang.String("H")
+ required: Int
+ println(A(3) + "H")
+ ^
+one error found
diff --git a/test/files/neg/t3006.scala b/test/files/neg/t3006.scala
new file mode 100755
index 0000000000..f476c1717d
--- /dev/null
+++ b/test/files/neg/t3006.scala
@@ -0,0 +1,10 @@
+object Test extends Application {
+ case class A(x: Int);
+
+ class Foo(a: A) { println("Foo created!"); def +(x: Int) = new A(this.a.x + x); }
+
+ implicit def aToFoo(x: A) = new Foo(x);
+
+ println(A(3) + "H")
+
+}
diff --git a/test/files/neg/t3015.check b/test/files/neg/t3015.check
new file mode 100644
index 0000000000..32809b0669
--- /dev/null
+++ b/test/files/neg/t3015.check
@@ -0,0 +1,11 @@
+t3015.scala:7: error: scrutinee is incompatible with pattern type;
+ found : _$1 where type _$1
+ required: java.lang.String
+ val b(foo) = "foo"
+ ^
+t3015.scala:7: error: type mismatch;
+ found : _$1(in value foo) where type _$1(in value foo) <: java.lang.String
+ required: (some other)_$1(in value foo) where type (some other)_$1(in value foo)
+ val b(foo) = "foo"
+ ^
+two errors found
diff --git a/test/files/neg/t3015.scala b/test/files/neg/t3015.scala
new file mode 100644
index 0000000000..0bd53e7a03
--- /dev/null
+++ b/test/files/neg/t3015.scala
@@ -0,0 +1,8 @@
+class UnApp[P] {
+ def unapply(a: P): Option[P] = Some(a)
+}
+
+object Test extends Application {
+ val b: UnApp[_] = new UnApp[String]
+ val b(foo) = "foo"
+}
diff --git a/test/files/neg/t3115.check b/test/files/neg/t3115.check
new file mode 100644
index 0000000000..04f64eec3f
--- /dev/null
+++ b/test/files/neg/t3115.check
@@ -0,0 +1,10 @@
+t3115.scala:6: error: object Math in object sc is deprecated:
+ println(sc.Math)
+ ^
+t3115.scala:7: error: object Math in package scala is deprecated: use scala.math package instead
+ println(scala.Math)
+ ^
+t3115.scala:8: error: object Math in package scala is deprecated: use scala.math package instead
+ scala.Math.Pi
+ ^
+three errors found
diff --git a/test/files/neg/t3115.flags b/test/files/neg/t3115.flags
new file mode 100644
index 0000000000..bf8f88334b
--- /dev/null
+++ b/test/files/neg/t3115.flags
@@ -0,0 +1 @@
+-deprecation -Yfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t3115.scala b/test/files/neg/t3115.scala
new file mode 100755
index 0000000000..4aeeb4063e
--- /dev/null
+++ b/test/files/neg/t3115.scala
@@ -0,0 +1,9 @@
+object sc {
+ @deprecated("") object Math
+}
+
+object Test {
+ println(sc.Math)
+ println(scala.Math)
+ scala.Math.Pi
+}
diff --git a/test/files/neg/t3118.check b/test/files/neg/t3118.check
new file mode 100644
index 0000000000..da00f1c330
--- /dev/null
+++ b/test/files/neg/t3118.check
@@ -0,0 +1,7 @@
+t3118.scala:6: error: value C is not a member of O1
+ println(x.C()) // should not be accessible
+ ^
+t3118.scala:7: error: type C is not a member of O1
+ println(new x.C) // is correctly not accessible
+ ^
+two errors found
diff --git a/test/files/neg/t3118.scala b/test/files/neg/t3118.scala
new file mode 100644
index 0000000000..9be24c1ed4
--- /dev/null
+++ b/test/files/neg/t3118.scala
@@ -0,0 +1,8 @@
+class O1 {
+ private[this] case class C()
+
+ val x = new O1
+
+ println(x.C()) // should not be accessible
+ println(new x.C) // is correctly not accessible
+}
diff --git a/test/files/neg/t3222.check b/test/files/neg/t3222.check
new file mode 100644
index 0000000000..6170827cc9
--- /dev/null
+++ b/test/files/neg/t3222.check
@@ -0,0 +1,13 @@
+t3222.scala:1: error: not found: type B
+@throws(classOf[B])
+ ^
+t3222.scala:4: error: not found: type D
+ def foo(@throws(classOf[D]) x: Int) {}
+ ^
+t3222.scala:3: error: not found: type C
+ @throws(classOf[C])
+ ^
+t3222.scala:6: error: not found: type E
+ @throws(classOf[E])
+ ^
+four errors found
diff --git a/test/files/neg/t3222.scala b/test/files/neg/t3222.scala
new file mode 100644
index 0000000000..448292e8a7
--- /dev/null
+++ b/test/files/neg/t3222.scala
@@ -0,0 +1,9 @@
+@throws(classOf[B])
+class ExceptionTest {
+ @throws(classOf[C])
+ def foo(@throws(classOf[D]) x: Int) {}
+
+ @throws(classOf[E])
+ type t = String
+}
+
diff --git a/test/files/neg/unit2anyref.check b/test/files/neg/unit2anyref.check
index 7af4564ffb..2616fd35f9 100644
--- a/test/files/neg/unit2anyref.check
+++ b/test/files/neg/unit2anyref.check
@@ -1,10 +1,8 @@
unit2anyref.scala:2: error: type mismatch;
found : Unit
required: AnyRef
-Note that implicit conversions are not applicable because they are ambiguous:
- both method any2stringadd in object Predef of type (x: Any)scala.runtime.StringAdd
- and method any2ArrowAssoc in object Predef of type [A](x: A)ArrowAssoc[A]
- are possible conversion functions from Unit to AnyRef
+Note: primitive types are not implicitly converted to AnyRef.
+You can safely force boxing by casting x.asInstanceOf[AnyRef].
val x: AnyRef = () // this should not succeed.
^
one error found
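
A minimal sketch of the workaround the rewritten note recommends, with a hypothetical object name (not part of the patch):

    object UnitBoxingSketch {
      // force boxing explicitly, as the new error note suggests
      val x: AnyRef = ().asInstanceOf[AnyRef]
    }
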
diff --git a/test/files/pos/annotations.scala b/test/files/pos/annotations.scala
index d1bd6ba264..0819379d86 100644
--- a/test/files/pos/annotations.scala
+++ b/test/files/pos/annotations.scala
@@ -1,4 +1,5 @@
class ann(i: Int) extends Annotation
+class cfann(x: String) extends ClassfileAnnotation
// annotations on abstract types
abstract class C1[@serializable @cloneable +T, U, V[_]]
@@ -35,6 +36,10 @@ object Test {
// annotation on annotation constructor
@(ann @ann(100))(200) def foo() = 300
+
+ // #2984
+ private final val NAMESPACE = "/info"
+ @cfann(x = NAMESPACE + "/index") def index = "success"
}
// test forward references to getters / setters
@@ -84,3 +89,22 @@ trait BeanF {
def isG(): Boolean
def setG(nb: Boolean): Unit
}
+
+
+class Ann3(arr: Array[String]) extends ClassfileAnnotation
+class Ann4(i: Int) extends ClassfileAnnotation
+class Ann5(value: Class[_]) extends ClassfileAnnotation
+
+object Test3 {
+ final val i = 1083
+ final val cls = classOf[String]
+}
+
+class Test4 {
+ @Ann3(arr = Array("dlkfj", "DSF"))
+ @Ann4(i = 2908)
+ @Ann4(i = Test3.i)
+ @Ann5(value = classOf[Int])
+ @Ann5(Test3.cls)
+ def foo {}
+}
diff --git a/test/files/pos/bug0646.scala b/test/files/pos/bug0646.scala
index 64214f65b1..a56e857223 100644
--- a/test/files/pos/bug0646.scala
+++ b/test/files/pos/bug0646.scala
@@ -10,7 +10,7 @@ object xfor {
</bks>;
new NodeSeq { val theSeq = books.child } match {
- case t @ <title>Blabla</title> => t
+ case t @ Seq(<title>Blabla</title>) => t
}
//val n: NodeSeq = new NodeSeq { val theSeq = books.child }
diff --git a/test/files/pos/bug2310.scala b/test/files/pos/bug2310.scala
new file mode 100644
index 0000000000..68912b4961
--- /dev/null
+++ b/test/files/pos/bug2310.scala
@@ -0,0 +1,38 @@
+import scala.Stream._
+
+object consistencyError {
+ /* this gives an error:
+ Consistency problem compiling (virtual file)!
+ Trying to call method body%1(List(scala.collection.immutable.Stream[A])) with arguments (List(tp2, temp6, temp5))
+ case (l #:: ls, rs) => None
+ ^
+ scala.tools.nsc.symtab.Types$TypeError: too many arguments for method body%1: (val rs: scala.collection.immutable.Stream[A])None.type
+
+ two errors found
+ vss(0) =
+ args = List(tp2, temp6, temp5)
+ vss(1) = value rs, value ls, value l
+ args = List(tp2, temp6, temp5)
+ targets(0) = FinalState(,scala.None)
+ targets(1) = FinalState(,scala.None)
+ labels(1) = method body%1
+ labels(0) = method body%0
+ bx = 1
+ label.tpe = (val rs: scala.collection.immutable.Stream[A])None.type
+ */
+ def crash[A](lefts: Stream[A], rights: Stream[A]) = (lefts, rights) match {
+ case (Stream.Empty, Stream.Empty) => None
+ case (l #:: ls, rs) => None
+ }
+
+ // These work
+ // def works1[A](lefts: Stream[A]) = lefts match {
+ // case Stream.Empty => None
+ // case l #:: ls => None
+ // }
+ //
+ // def works2[A](lefts: Stream[A], rights: Stream[A]) = (lefts, rights) match {
+ // case (Stream.Empty, Stream.Empty) => None
+ // case (ls, rs) => None
+ // }
+}
diff --git a/test/files/pos/bug3097.flags b/test/files/pos/bug3097.flags
new file mode 100644
index 0000000000..570b15929d
--- /dev/null
+++ b/test/files/pos/bug3097.flags
@@ -0,0 +1 @@
+-unchecked -Yfatal-warnings
diff --git a/test/files/pos/bug3097.scala b/test/files/pos/bug3097.scala
new file mode 100644
index 0000000000..a034b960f7
--- /dev/null
+++ b/test/files/pos/bug3097.scala
@@ -0,0 +1,31 @@
+package seal
+
+sealed trait ISimpleValue
+
+sealed trait IListValue extends ISimpleValue {
+ def items: List[IAtomicValue[_]]
+}
+sealed trait IAtomicValue[O] extends ISimpleValue {
+ def data: O
+}
+
+sealed trait IAbstractDoubleValue[O] extends IAtomicValue[O] { }
+sealed trait IDoubleValue extends IAbstractDoubleValue[Double]
+
+case class ListValue(val items: List[IAtomicValue[_]]) extends IListValue
+class DoubleValue(val data: Double) extends IDoubleValue {
+ def asDouble = data
+}
+
+object Test {
+ /**
+ * @param args the command line arguments
+ */
+ def main(args: Array[String]): Unit = {
+ val v: ISimpleValue = new DoubleValue(1)
+ v match {
+ case m: IListValue => println("list")
+ case a: IAtomicValue[_] => println("atomic")
+ }
+ }
+}
diff --git a/test/files/pos/bug3136.scala b/test/files/pos/bug3136.scala
new file mode 100644
index 0000000000..33d42c2f3c
--- /dev/null
+++ b/test/files/pos/bug3136.scala
@@ -0,0 +1,19 @@
+class Type
+class Symbol
+case class PolyType(tps: List[Symbol], res: Type) extends Type
+class OtherType extends Type
+
+// case class NullaryMethodType(tp: Type) extends Type
+
+object NullaryMethodType {
+ def apply(resTpe: Type): Type = PolyType(List(), resTpe)
+ def unapply(tp: Type): Option[(Type)] = None
+}
+
+object Test {
+ def TEST(tp: Type): String =
+ tp match {
+ case PolyType(ps1, PolyType(ps2, res @ PolyType(a, b))) => "1"+tp // couldn't find a simpler version that still crashes
+ case NullaryMethodType(meh) => "2"+meh
+ }
+}
diff --git a/test/files/pos/bug3175.scala b/test/files/pos/bug3175.scala
new file mode 100644
index 0000000000..89bbf8b5fc
--- /dev/null
+++ b/test/files/pos/bug3175.scala
@@ -0,0 +1,7 @@
+object Test {
+ def f(g:{val update:Unit}) = g.update
+
+ def main(args: Array[String]): Unit = {
+
+ }
+}
diff --git a/test/files/pos/bug432.scala b/test/files/pos/bug432.scala
index 8e3097ac9d..087fd70aba 100644
--- a/test/files/pos/bug432.scala
+++ b/test/files/pos/bug432.scala
@@ -1,2 +1,2 @@
-case class Tata
+case class Tata()
object Tata
diff --git a/test/files/pos/bug715.cmds b/test/files/pos/bug715.cmds
new file mode 100644
index 0000000000..2836967fca
--- /dev/null
+++ b/test/files/pos/bug715.cmds
@@ -0,0 +1,2 @@
+scalac meredith_1.scala
+scalac runner_2.scala
diff --git a/test/files/pos/manifest1.scala b/test/files/pos/manifest1.scala
index 4d3b3bfa48..8901aa7437 100644
--- a/test/files/pos/manifest1.scala
+++ b/test/files/pos/manifest1.scala
@@ -13,8 +13,9 @@ object Test {
abstract class C { type T = String; val x: T }
val c = new C { val x = "abc" }
foo(c.x)
- abstract class D { type T; val x: T }
- val d: D = new D { type T = String; val x = "x" }
+ abstract class D { type T; implicit val m: Manifest[T]; val x: T }
+ val stringm = implicitly[Manifest[String]]
+ val d: D = new D { type T = String; val m = stringm; val x = "x" }
+ import d.m
foo(d.x)
-
}
diff --git a/test/files/pos/scan.scala b/test/files/pos/scan.scala
new file mode 100644
index 0000000000..47e0a7d976
--- /dev/null
+++ b/test/files/pos/scan.scala
@@ -0,0 +1,23 @@
+
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ val lst = List(1, 2, 3, 4, 5)
+
+ assert(lst.scanLeft(0)(_ + _) == List(0, 1, 3, 6, 10, 15))
+ assert(lst.scanRight(0)(_ + _) == List(15, 14, 12, 9, 5, 0))
+
+ val emp = List[Int]()
+ assert(emp.scanLeft(0)(_ + _) == List(0))
+ assert(emp.scanRight(0)(_ + _) == List(0))
+
+ val stream = Stream(1, 2, 3, 4, 5)
+ assert(stream.scanLeft(0)(_ + _) == Stream(0, 1, 3, 6, 10, 15))
+
+ assert(Stream.from(1).scanLeft(0)(_ + _).take(5) == Stream(0, 1, 3, 6, 10))
+ }
+
+} \ No newline at end of file
diff --git a/test/files/pos/spec-List.scala b/test/files/pos/spec-List.scala
index ad864abd7c..c5bc3aa5fd 100644
--- a/test/files/pos/spec-List.scala
+++ b/test/files/pos/spec-List.scala
@@ -23,10 +23,10 @@ import annotation.tailrec
* @author Martin Odersky and others
* @version 2.8
*/
-sealed abstract class List[@specialized +A] extends LinearSeq[A]
+sealed trait List[@specialized +A] extends LinearSeq[A]
with Product
with GenericTraversableTemplate[A, List]
- with LinearSeqLike[A, List[A]] {
+ with LinearSeqOptimized[A, List[A]] {
override def companion: GenericCompanion[List] = List
import scala.collection.{Iterable, Traversable, Seq}
@@ -144,18 +144,12 @@ sealed abstract class List[@specialized +A] extends LinearSeq[A]
/** Create a new list which contains all elements of this list
* followed by all elements of Traversable `that'
*/
- override def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
+ override def ++[B >: A, That](xs: TraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = {
val b = bf(this)
- if (b.isInstanceOf[ListBuffer[_]]) (this ::: that.toList).asInstanceOf[That]
- else super.++(that)
+ if (b.isInstanceOf[ListBuffer[_]]) (this ::: xs.toList).asInstanceOf[That]
+ else super.++(xs)
}
- /** Create a new list which contains all elements of this list
- * followed by all elements of Iterator `that'
- */
- override def ++[B >: A, That](that: Iterator[B])(implicit bf: CanBuildFrom[List[A], B, That]): That =
- this ++ that.toList
-
/** Overrides the method in Iterable for efficiency.
*
* @return the list itself
@@ -654,7 +648,7 @@ object List extends SeqFactory[List] {
*
* @param arr the array to convert
* @param start the first index to consider
- * @param len the lenght of the range to convert
+ * @param len the length of the range to convert
* @return a list that contains the same elements than <code>arr</code>
* in the same order
*/
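
A small usage sketch of the kind of call the widened ++ signature above accepts: one TraversableOnce overload now covers both collection and iterator arguments. Shown against the standard library List under the same 2.8 collections, with a hypothetical object name (not part of the patch):

    object PlusPlusSketch {
      def main(args: Array[String]): Unit = {
        val xs = List(1, 2, 3)
        println(xs ++ List(4, 5))     // Traversable argument
        println(xs ++ Iterator(6, 7)) // Iterator argument, handled by the same overload
      }
    }
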
diff --git a/test/files/pos/spec-arrays.scala b/test/files/pos/spec-arrays.scala
index c1f253a39d..96c75dd8de 100644
--- a/test/files/pos/spec-arrays.scala
+++ b/test/files/pos/spec-arrays.scala
@@ -30,7 +30,7 @@ final class IntArray(arr: Array[Int]) extends AbsArray[Int] {
def length: Int = arr.length
}
-final class GenericArray[T](arr: Array[T]) extends AbsArray[T] {
+final class ArraySeq[T](arr: Array[T]) extends AbsArray[T] {
def apply(idx: Int): T = arr(idx)
def update(idx: Int, elem: T) = arr(idx) = elem
def length: Int = arr.length
@@ -96,7 +96,7 @@ class ScalaSpec2Test extends Test {
}
class ScalaWrapTest extends Test {
- val arr: AbsArray[Int] = new GenericArray(new Array[Int](1000))
+ val arr: AbsArray[Int] = new ArraySeq(new Array[Int](1000))
def sum(): Int = {
var acc = 0
@@ -116,7 +116,7 @@ class ScalaWrapTest extends Test {
}
class ScalaGenTest extends Test {
- val arr: AbsArray[Integer] = new GenericArray(new Array[Integer](1000))
+ val arr: AbsArray[Integer] = new ArraySeq(new Array[Integer](1000))
for (i <- 0 until arr.length) arr(i) = new Integer(0)
def sum(): Int = {
diff --git a/test/files/pos/spec-partially.flags b/test/files/pos/spec-partially.flags
new file mode 100644
index 0000000000..973517e1c9
--- /dev/null
+++ b/test/files/pos/spec-partially.flags
@@ -0,0 +1 @@
+-Yspecialize
diff --git a/test/files/pos/spec-partially.scala b/test/files/pos/spec-partially.scala
new file mode 100644
index 0000000000..90778e42a8
--- /dev/null
+++ b/test/files/pos/spec-partially.scala
@@ -0,0 +1,5 @@
+/** Test case for partially specialized classes. see #2880. */
+
+class Arc[State, @specialized T](label: T, to: State)
+
+
diff --git a/test/files/pos/super.cmds b/test/files/pos/super.cmds
new file mode 100644
index 0000000000..8f3f8a4172
--- /dev/null
+++ b/test/files/pos/super.cmds
@@ -0,0 +1,2 @@
+javac Super_1.java
+scalac Super_2.scala
diff --git a/test/files/pos/switchUnbox.flags b/test/files/pos/switchUnbox.flags
index ec1ad20e3a..cdf5f84ede 100644
--- a/test/files/pos/switchUnbox.flags
+++ b/test/files/pos/switchUnbox.flags
@@ -1 +1 @@
--Xsqueeze:on
+-Ysqueeze:on
diff --git a/test/pending/pos/t0816.scala b/test/files/pos/t0816.scala
index 44282ea872..0128a0ad72 100644
--- a/test/pending/pos/t0816.scala
+++ b/test/files/pos/t0816.scala
@@ -1,6 +1,6 @@
abstract class Atest(val data: String)
-case class Btest(override val data: String, val b: boolean) extends Atest(data)
+case class Btest(override val data: String, val b: Boolean) extends Atest(data)
case class Ctest(override val data: String) extends Btest(data, true)
diff --git a/test/files/pos/t0971.java b/test/files/pos/t0971.java
deleted file mode 100644
index 160dc2c5af..0000000000
--- a/test/files/pos/t0971.java
+++ /dev/null
@@ -1,4 +0,0 @@
-class A {
- int y = 1, z;
- static Object x = new java.util.HashMap<Object , Object > () ;
-}
diff --git a/test/files/pos/t0999.scala b/test/files/pos/t0999.scala
deleted file mode 100644
index c384820af1..0000000000
--- a/test/files/pos/t0999.scala
+++ /dev/null
@@ -1,5 +0,0 @@
-object A {
- val d: Double = Math.sqrt(5 match {
- case x: Double => x
- })
-}
diff --git a/test/files/pos/t1029.cmds b/test/files/pos/t1029.cmds
new file mode 100644
index 0000000000..06b863dc03
--- /dev/null
+++ b/test/files/pos/t1029.cmds
@@ -0,0 +1,2 @@
+scalac Test_1.scala
+scalac Test_2.scala
diff --git a/test/pending/pos/t1035.scala b/test/files/pos/t1035.scala
index a280a415d2..a280a415d2 100644
--- a/test/pending/pos/t1035.scala
+++ b/test/files/pos/t1035.scala
diff --git a/test/files/pos/t1164.scala b/test/files/pos/t1164.scala
index b238bf54d9..ca780c9a26 100644
--- a/test/files/pos/t1164.scala
+++ b/test/files/pos/t1164.scala
@@ -12,7 +12,7 @@ object test {
def matchAndGetArgFromFoo[a]( e:Foo[a]):a = {e match { case Foo(x) => x }}
- // Try the same thing as above but use function as arguemnt to Bar
+ // Try the same thing as above but use function as argument to Bar
// constructor
type FunIntToA [a] = (Int) => a
diff --git a/test/files/pos/t1380.flags b/test/files/pos/t1380.flags
deleted file mode 100644
index f0b0ef7f51..0000000000
--- a/test/files/pos/t1380.flags
+++ /dev/null
@@ -1 +0,0 @@
--cp pending/pos/t1380/gnujaxp.jar
diff --git a/test/files/pos/t1751.cmds b/test/files/pos/t1751.cmds
new file mode 100644
index 0000000000..d4a4898ffd
--- /dev/null
+++ b/test/files/pos/t1751.cmds
@@ -0,0 +1,3 @@
+javac SuiteClasses.java
+scalac A2_1.scala
+scalac A1_2.scala
diff --git a/test/files/pos/t1756.scala b/test/files/pos/t1756.scala
index 4f7202114c..d5d3ddc624 100755
--- a/test/files/pos/t1756.scala
+++ b/test/files/pos/t1756.scala
@@ -15,7 +15,7 @@ expected type from x+, because the lhs x is still typed as a Poly[A].
This means that the argument of the implicit conversion is typechecked
with expected type A with Poly[A]. And no solution is found.
-To solve this, I added a fallback scheme similar to implicit arguents:
+To solve this, I added a fallback scheme similar to implicit arguments:
When an implicit view that adds a method matching given arguments and result
type fails, try again without the result type.
*/
diff --git a/test/files/pos/t1782.cmds b/test/files/pos/t1782.cmds
new file mode 100644
index 0000000000..61f3d3788e
--- /dev/null
+++ b/test/files/pos/t1782.cmds
@@ -0,0 +1,2 @@
+javac Ann.java Days.java ImplementedBy.java
+scalac Test_1.scala
diff --git a/test/pending/pos/t1836/J.java b/test/files/pos/t1836/J.java
index a009a59e21..a009a59e21 100644
--- a/test/pending/pos/t1836/J.java
+++ b/test/files/pos/t1836/J.java
diff --git a/test/pending/pos/t1836/S.scala b/test/files/pos/t1836/S.scala
index 88ce1063e9..88ce1063e9 100644
--- a/test/pending/pos/t1836/S.scala
+++ b/test/files/pos/t1836/S.scala
diff --git a/test/files/pos/t1942.cmds b/test/files/pos/t1942.cmds
new file mode 100644
index 0000000000..c14311042a
--- /dev/null
+++ b/test/files/pos/t1942.cmds
@@ -0,0 +1,2 @@
+scalac A_1.scala
+scalac Test_2.scala
diff --git a/test/pending/pos/t1996.scala b/test/files/pos/t1996.scala
index 2730128196..2730128196 100644
--- a/test/pending/pos/t1996.scala
+++ b/test/files/pos/t1996.scala
diff --git a/test/files/pos/t2421c.scala b/test/files/pos/t2421c.scala
new file mode 100644
index 0000000000..755e6a39f0
--- /dev/null
+++ b/test/files/pos/t2421c.scala
@@ -0,0 +1,17 @@
+object Test {
+ class A
+ class B
+ class C
+ class F[X]
+
+ def f(implicit aa: F[A]) = println(aa)
+
+ implicit def a : F[A] = new F[A]()
+
+ // generalised from t2421b to verify we check enough
+ class G[X]
+ implicit def g[X] = new G[X]()
+ implicit def b[X <: B](implicit mx: G[X]) = new F[X]()
+
+ f
+} \ No newline at end of file
diff --git a/test/files/pos/t2433/A.java b/test/files/pos/t2433/A.java
new file mode 100755
index 0000000000..340690c402
--- /dev/null
+++ b/test/files/pos/t2433/A.java
@@ -0,0 +1,4 @@
+class A223 extends B223.Inner {
+ static class Inner {}
+ void foo() {}
+} \ No newline at end of file
diff --git a/test/files/pos/t2433/B.java b/test/files/pos/t2433/B.java
new file mode 100755
index 0000000000..151dd71ca1
--- /dev/null
+++ b/test/files/pos/t2433/B.java
@@ -0,0 +1,4 @@
+class B223 {
+ static class Inner {}
+ void m(A223.Inner x) {}
+} \ No newline at end of file
diff --git a/test/files/pos/t2433/Test.scala b/test/files/pos/t2433/Test.scala
new file mode 100755
index 0000000000..02fd89b646
--- /dev/null
+++ b/test/files/pos/t2433/Test.scala
@@ -0,0 +1,3 @@
+object Test {
+ (new A223).foo()
+}
diff --git a/test/files/pos/t2464.cmds b/test/files/pos/t2464.cmds
new file mode 100644
index 0000000000..ca733ef23d
--- /dev/null
+++ b/test/files/pos/t2464.cmds
@@ -0,0 +1,3 @@
+javac JavaOne.java
+scalac ScalaOne_1.scala
+scalac t2464_2.scala
diff --git a/test/pending/pos/t2610.scala b/test/files/pos/t2610.scala
index 8dd4cde66e..8dd4cde66e 100644
--- a/test/pending/pos/t2610.scala
+++ b/test/files/pos/t2610.scala
diff --git a/test/pending/pos/t2660.scala b/test/files/pos/t2660.scala
index b1908b201b..b1908b201b 100644
--- a/test/pending/pos/t2660.scala
+++ b/test/files/pos/t2660.scala
diff --git a/test/pending/pos/t2691.scala b/test/files/pos/t2691.scala
index ba2e52f1fe..ba2e52f1fe 100644
--- a/test/pending/pos/t2691.scala
+++ b/test/files/pos/t2691.scala
diff --git a/test/files/pos/t2726.cmds b/test/files/pos/t2726.cmds
new file mode 100644
index 0000000000..5fcb18bfbb
--- /dev/null
+++ b/test/files/pos/t2726.cmds
@@ -0,0 +1,2 @@
+scalac SQLBuilder_1.scala
+scalac test_2.scala
diff --git a/test/files/pos/t2741/2741-1.scala b/test/files/pos/t2741/2741-1.scala
new file mode 100644
index 0000000000..91c120ec1b
--- /dev/null
+++ b/test/files/pos/t2741/2741-1.scala
@@ -0,0 +1,13 @@
+sealed trait Kleisli[M[_], A, B]
+
+trait PartialApplyKA[T[_[_], _, _], M[_], A] {
+ type Apply[B] = T[M, A, B]
+}
+
+trait MA[M[_], A]
+
+trait MAs {
+ val a: MA[PartialApplyKA[Kleisli, List, String]#Apply, Int] = null
+}
+
+object Scalaz extends MAs
diff --git a/test/files/pos/t2741/2741-2.scala b/test/files/pos/t2741/2741-2.scala
new file mode 100644
index 0000000000..41f6a64260
--- /dev/null
+++ b/test/files/pos/t2741/2741-2.scala
@@ -0,0 +1,5 @@
+// object Test compiles jointly, but not separately.
+object Test {
+ import Scalaz._
+ Scalaz.a
+} \ No newline at end of file
diff --git a/test/files/pos/t2795.scala b/test/files/pos/t2795.scala
index c355a10c54..a4e1b7db83 100644
--- a/test/files/pos/t2795.scala
+++ b/test/files/pos/t2795.scala
@@ -5,6 +5,7 @@ trait Element[T] {
trait Config {
type T <: Element[T]
+ implicit val m: ClassManifest[T]
// XXX Following works fine:
// type T <: Element[_]
}
diff --git a/test/files/pos/t2797.scala b/test/files/pos/t2797.scala
new file mode 100644
index 0000000000..4323664e91
--- /dev/null
+++ b/test/files/pos/t2797.scala
@@ -0,0 +1,9 @@
+class MyVector[A] {
+ def map[B](f: A => B): MyVector[B] = error("")
+}
+
+object Test {
+ def unzip[B, C](_this: MyVector[(B, C)]): (MyVector[B], MyVector[C]) = {
+ (_this.map{ bc => bc._1 }, _this.map{ bc => bc._2 })
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t2867.scala b/test/files/pos/t2867.scala
deleted file mode 100644
index 0434a380b9..0000000000
--- a/test/files/pos/t2867.scala
+++ /dev/null
@@ -1 +0,0 @@
-case class A(l: List[_]*)
diff --git a/test/files/pos/t2868.cmds b/test/files/pos/t2868.cmds
new file mode 100644
index 0000000000..ed8124a9e0
--- /dev/null
+++ b/test/files/pos/t2868.cmds
@@ -0,0 +1,3 @@
+javac Jann.java Nest.java
+scalac pick_1.scala
+scalac test_2.scala
diff --git a/test/files/pos/t2868/Jann.java b/test/files/pos/t2868/Jann.java
new file mode 100644
index 0000000000..f5b68de7b0
--- /dev/null
+++ b/test/files/pos/t2868/Jann.java
@@ -0,0 +1,5 @@
+public @interface Jann {
+ public String str();
+ public Nest inn();
+ public int[] arr();
+}
diff --git a/test/files/pos/t2868/Nest.java b/test/files/pos/t2868/Nest.java
new file mode 100644
index 0000000000..53652291ad
--- /dev/null
+++ b/test/files/pos/t2868/Nest.java
@@ -0,0 +1,3 @@
+public @interface Nest {
+ public int value();
+}
diff --git a/test/files/pos/t2868/pick_1.scala b/test/files/pos/t2868/pick_1.scala
new file mode 100644
index 0000000000..e91728ec2f
--- /dev/null
+++ b/test/files/pos/t2868/pick_1.scala
@@ -0,0 +1,7 @@
+class ann(s: String) extends StaticAnnotation
+class pick {
+ final val s = "bang!"
+ @ann("bang!") def foo = 1
+ @Jann(str = "bang!", inn = new Nest(1), arr = Array(1, 2)) def bar = 2
+ @Jann(str = "bang!", inn = new Nest(1), arr = Array(1, 2)) def baz = 3
+}
diff --git a/test/files/pos/t2868/test_2.scala b/test/files/pos/t2868/test_2.scala
new file mode 100644
index 0000000000..f11ef0fae2
--- /dev/null
+++ b/test/files/pos/t2868/test_2.scala
@@ -0,0 +1,6 @@
+class test {
+ val l = (new pick).s
+ val u = (new pick).foo
+ val c = (new pick).bar
+ val k = (new pick).baz
+}
diff --git a/test/files/pos/t2913.scala b/test/files/pos/t2913.scala
new file mode 100755
index 0000000000..11d8b92053
--- /dev/null
+++ b/test/files/pos/t2913.scala
@@ -0,0 +1,53 @@
+class A {
+ def foo(a: Int) = 0
+}
+
+class RichA {
+ def foo(a: String) = 0
+ def foo(a: String, b: String) = 0
+ def foo() = 0
+}
+
+object Test {
+
+ implicit def AToRichA(a: A) = new RichA
+
+ val a = new A
+ a.foo()
+ a.foo(1)
+
+ a.foo("") // Without implicits, a type error regarding invalid argument types is generated at `""`. This is
+ // the same position as an argument, so the 'second try' typing with an Implicit View is tried,
+ // and AToRichA(a).foo("") is found.
+ //
+ // My reading of the spec "7.3 Views" is that `a.foo` denotes a member of `a`, so the view should
+ // not be triggered.
+ //
+ // But perhaps the implementation was changed to solve See https://lampsvn.epfl.ch/trac/scala/ticket/1756
+
+ a.foo("a", "b") // Without implicits, a type error regarding invalid arity is generated at `foo(<error>"", "")`.
+ // Typers#tryTypedApply:3274 only checks if the error is as the same position as `foo`, `"a"`, or `"b"`.
+ // None of these po
+}
+
+// t0851 is essentially the same:
+object test1 {
+ case class Foo[T,T2](f : (T,T2) => String) extends (((T,T2)) => String){
+ def apply(t : T) = (s:T2) => f(t,s)
+ def apply(p : (T,T2)) = f(p._1,p._2)
+ }
+ implicit def g[T](f : (T,String) => String) = Foo(f)
+ def main(args : Array[String]) : Unit = {
+ val f = (x:Int,s:String) => s + x
+ println(f(1))
+ ()
+ }
+}
+object Main {
+ def main(args : Array[String]) {
+ val fn = (a : Int, str : String) => "a: " + a + ", str: " + str
+ implicit def fx[T](f : (T,String) => String) = (x:T) => f(x,null)
+ println(fn(1))
+ ()
+ }
+}
diff --git a/test/files/pos/t294.cmds b/test/files/pos/t294.cmds
new file mode 100644
index 0000000000..62c9a5a068
--- /dev/null
+++ b/test/files/pos/t294.cmds
@@ -0,0 +1,3 @@
+javac Ann.java Ann2.java
+scalac Test_1.scala
+scalac Test_2.scala
diff --git a/test/files/pos/t2940/Cycle.java b/test/files/pos/t2940/Cycle.java
new file mode 100644
index 0000000000..eef6c23b5e
--- /dev/null
+++ b/test/files/pos/t2940/Cycle.java
@@ -0,0 +1,3 @@
+public interface Cycle<T extends Cycle<?>> {
+ void doStuff();
+} \ No newline at end of file
diff --git a/test/files/pos/t2940/Error.scala b/test/files/pos/t2940/Error.scala
new file mode 100644
index 0000000000..bf5a6bd0df
--- /dev/null
+++ b/test/files/pos/t2940/Error.scala
@@ -0,0 +1,12 @@
+abstract class Error {
+ val c: Cycle[_]
+}
+
+object Test {
+ trait Quux[T] extends Cycle[Quux[T]]
+ val x = new Quux[Int] { def doStuff() { } }
+
+ def main(args: Array[String]): Unit = {
+
+ }
+}
diff --git a/test/files/pos/t2956/BeanDefinitionVisitor.java b/test/files/pos/t2956/BeanDefinitionVisitor.java
new file mode 100644
index 0000000000..2ff5daa253
--- /dev/null
+++ b/test/files/pos/t2956/BeanDefinitionVisitor.java
@@ -0,0 +1,6 @@
+import java.util.Map;
+public class BeanDefinitionVisitor {
+ @SuppressWarnings("unchecked")
+ protected void visitMap(Map<?, ?> mapVal) {
+ }
+}
diff --git a/test/files/pos/t2956/t2956.scala b/test/files/pos/t2956/t2956.scala
new file mode 100755
index 0000000000..eb6e817465
--- /dev/null
+++ b/test/files/pos/t2956/t2956.scala
@@ -0,0 +1,7 @@
+import scala.collection.JavaConversions._
+
+class Outer {
+ protected class Inner extends BeanDefinitionVisitor {
+ protected def visitMap(mapVal: Map[_, _]): Unit = ()
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t2994a.scala b/test/files/pos/t2994a.scala
new file mode 100644
index 0000000000..cb4a389e2f
--- /dev/null
+++ b/test/files/pos/t2994a.scala
@@ -0,0 +1,27 @@
+object Naturals {
+ trait NAT {
+ type a[s[_ <: NAT] <: NAT, z <: NAT] <: NAT
+ type v = a[SUCC, ZERO]
+ }
+ final class ZERO extends NAT {
+ type a[s[_ <: NAT] <: NAT, z <: NAT] = z
+ }
+ final class SUCC[n <: NAT] extends NAT {
+ type a[s[_ <: NAT] <: NAT, z <: NAT] = s[n#a[s, z]]
+ }
+ type _0 = ZERO
+ type _1 = SUCC[_0]
+ type _2 = SUCC[_1]
+ type _3 = SUCC[_2]
+ type _4 = SUCC[_3]
+ type _5 = SUCC[_4]
+ type _6 = SUCC[_5]
+
+
+ // crashes scala-2.8.0 beta1
+ trait MUL[n <: NAT, m <: NAT] extends NAT {
+ trait curry[n[_[_], _], s[_]] { type f[z <: NAT] = n[s, z] }
+ type a[s[_ <: NAT] <: NAT, z <: NAT] = n#a[curry[m#a, s]#f, z]
+ }
+
+} \ No newline at end of file
diff --git a/test/files/pos/t2994b.scala b/test/files/pos/t2994b.scala
new file mode 100644
index 0000000000..c9d9cc812b
--- /dev/null
+++ b/test/files/pos/t2994b.scala
@@ -0,0 +1,7 @@
+object Test {
+ trait Bar[X[_]]
+ trait Baz[S[_] <: Bar[S]] {
+ type Apply[T]
+ }
+ trait Foo[V[_] <: Bar[V]] extends Bar[Baz[V]#Apply]
+} \ No newline at end of file
diff --git a/test/files/pos/t3037.scala b/test/files/pos/t3037.scala
new file mode 100644
index 0000000000..b71ffe0418
--- /dev/null
+++ b/test/files/pos/t3037.scala
@@ -0,0 +1,13 @@
+package test
+
+object A {
+ println(("a" match {
+ case "a" => 1
+ case _ => "a"
+ }).asInstanceOf[Object])
+ def foo[T](x: T) = x
+ var x: Int = 1
+ var y: Long = 1L
+ x = foo(x)
+ y = foo(y)
+}
diff --git a/test/files/pos/t3071.scala b/test/files/pos/t3071.scala
new file mode 100644
index 0000000000..7e14432941
--- /dev/null
+++ b/test/files/pos/t3071.scala
@@ -0,0 +1,7 @@
+class A (val i: Int) {
+ def copy (i: Int = this.i): A = new A(i)
+}
+
+class B (val j: Int) extends A(1) {
+ override def copy (j: Int = this.j): B = new B(j)
+}
diff --git a/test/files/pos/t3076/C2.scala b/test/files/pos/t3076/C2.scala
new file mode 100644
index 0000000000..d08f9ee81d
--- /dev/null
+++ b/test/files/pos/t3076/C2.scala
@@ -0,0 +1,4 @@
+class C2 {
+ def m1() { new T { } }
+ def m2() { new T { } }
+}
diff --git a/test/files/pos/t3076/T.scala b/test/files/pos/t3076/T.scala
new file mode 100644
index 0000000000..b710a29343
--- /dev/null
+++ b/test/files/pos/t3076/T.scala
@@ -0,0 +1,2 @@
+trait T { private val z = new C1 }
+private class C1
diff --git a/test/files/pos/t3079.scala b/test/files/pos/t3079.scala
new file mode 100644
index 0000000000..fa732ea516
--- /dev/null
+++ b/test/files/pos/t3079.scala
@@ -0,0 +1,17 @@
+sealed trait Identity[A] {
+ val value: A
+}
+
+trait Coerce[A, B] {
+ def unwrap: (A => B)
+}
+
+object Coerce {
+ def IdentityCoerce[B] = new Coerce[Identity[B], B] {
+ // java.lang.Error: A in trait Identity cannot be instantiated from ?x$1.type
+ def unwrap = _.value
+
+ // Providing the type of _ works around the problem.
+ //def unwrap = (_: Identity[B]).value
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t3152.scala b/test/files/pos/t3152.scala
new file mode 100644
index 0000000000..a20428dbee
--- /dev/null
+++ b/test/files/pos/t3152.scala
@@ -0,0 +1,20 @@
+trait Applicative[M[_]]
+
+sealed trait MA[M[_], A] {
+ def sequence[N[_], B](implicit a: A <:< N[B], n: Applicative[N]): N[M[B]] = error("stub")
+ // def sequence3[N[_], B]()(implicit a: A <:< N[B], n: Applicative[N]): N[M[B]] = error("stub")
+}
+
+object test {
+ implicit def ListMA[A](l: List[A]): MA[List, A] = error("stub")
+ implicit val ao: Applicative[Option] = error("stub")
+
+ /* This compiles OK:
+ (Nil: List[Option[Int]]).sequence3(): Option[List[Int]]
+ */
+
+ // BUG: error: immutable is not an enclosing class
+ // !!! No line number is reported with the error
+ (Nil: List[Option[Int]]).sequence: Option[List[Int]]
+ (List[Option[Int]]()).sequence: Option[List[Int]]
+} \ No newline at end of file
diff --git a/test/pending/pos/t425.scala b/test/files/pos/t425.scala
index e50c50ac35..e50c50ac35 100644
--- a/test/pending/pos/t425.scala
+++ b/test/files/pos/t425.scala
diff --git a/test/files/positions/Unsupported2.scala b/test/files/positions/Unsupported2.scala
new file mode 100644
index 0000000000..e21df5ef0e
--- /dev/null
+++ b/test/files/positions/Unsupported2.scala
@@ -0,0 +1,5 @@
+object Unsupported2 {
+ for (k <- 0 until xs.xize)
+ for(i = 0 until (xs.size)) {
+ }
+}
diff --git a/test/files/res/bug687.check b/test/files/res/bug687.check
index ee9520d1ea..353101c38b 100644
--- a/test/files/res/bug687.check
+++ b/test/files/res/bug687.check
@@ -1,11 +1,10 @@
-
nsc>
-nsc> bug687/QueryB.scala:3: error: name clash between defined and inherited member:
+nsc>
+bug687/QueryB.scala:3: error: name clash between defined and inherited member:
method equals:(o: java.lang.Object)Boolean and
method equals:(x$1: Any)Boolean in class Any
have same type after erasure: (o: java.lang.Object)Boolean
override def equals(o : Object) = false;
^
-
nsc>
nsc>
diff --git a/test/files/run/Course-2002-13.scala b/test/files/run/Course-2002-13.scala
index 27551b735b..c016d41a90 100644
--- a/test/files/run/Course-2002-13.scala
+++ b/test/files/run/Course-2002-13.scala
@@ -66,7 +66,7 @@ object Terms {
override def toString() =
a + (if (ts.isEmpty) "" else ts.mkString("(", ",", ")"));
def map(s: Subst): Term = Con(a, ts map (t => t map s));
- def tyvars = (ts flatMap (t => t.tyvars)).removeDuplicates;
+ def tyvars = (ts flatMap (t => t.tyvars)).distinct;
}
private var count = 0;
@@ -113,7 +113,7 @@ object Programs {
case class Clause(lhs: Term, rhs: List[Term]) {
def tyvars =
- (lhs.tyvars ::: (rhs flatMap (t => t.tyvars))).removeDuplicates;
+ (lhs.tyvars ::: (rhs flatMap (t => t.tyvars))).distinct;
def newInstance = {
var s: Subst = List();
for (val a <- tyvars) { s = Binding(a, newVar(a)) :: s }
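
A one-line illustration of the rename applied above: distinct replaces the deprecated removeDuplicates and keeps the first occurrence of each element, in order (hypothetical object name, not part of the patch):

    object DistinctSketch {
      def main(args: Array[String]): Unit = {
        assert(List(1, 2, 2, 3, 1).distinct == List(1, 2, 3))
      }
    }
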
diff --git a/test/files/run/ReplacementMatching.scala b/test/files/run/ReplacementMatching.scala
new file mode 100644
index 0000000000..05040d98a3
--- /dev/null
+++ b/test/files/run/ReplacementMatching.scala
@@ -0,0 +1,47 @@
+
+
+
+import util.matching._
+
+
+
+
+object Test {
+
+ def main(args: Array[String]) {
+ replacementMatching
+ groupsMatching
+ }
+
+ def replacementMatching {
+ val regex = """\$\{(.+?)\}""".r
+ val replaced = regex.replaceAllIn("Replacing: ${main}. And another method: ${foo}.",
+ (m: util.matching.Regex.Match) => {
+ val identifier = m.group(1)
+ identifier
+ })
+ assert(replaced == "Replacing: main. And another method: foo.")
+
+ val regex3 = """\$\{(.+?)\}""".r
+ val replaced3 = regex3.replaceSomeIn("Replacing: ${main}. And another: ${foo}.", (m: util.matching.Regex.Match) => {
+ val id = m.group(1)
+ if (id.startsWith("m")) Some(id) else None
+ })
+ assert(replaced3 == "Replacing: main. And another: ${foo}.")
+ }
+
+ def groupsMatching {
+ val Date = """(\d+)/(\d+)/(\d+)""".r
+ for (Regex.Groups(a, b, c) <- Date findFirstMatchIn "1/1/2001 marks the start of the millenium. 31/12/2000 doesn't.") {
+ assert(a == "1")
+ assert(b == "1")
+ assert(c == "2001")
+ }
+ for (Regex.Groups(a, b, c) <- (Date findAllIn "1/1/2001 marks the start of the millenium. 31/12/2000 doesn't.").matchData) {
+ assert(a == "1" || a == "31")
+ assert(b == "1" || b == "12")
+ assert(c == "2001" || c == "2000")
+ }
+ }
+
+}
diff --git a/test/files/run/arraycopy.scala b/test/files/run/arraycopy.scala
new file mode 100644
index 0000000000..bb06200dc7
--- /dev/null
+++ b/test/files/run/arraycopy.scala
@@ -0,0 +1,31 @@
+
+
+object Test {
+ def main(args: Array[String]) {
+ val a = new Array[Int](10)
+ val b = new Array[Any](10)
+ for (i <- 0 until 10) b(i) = i
+
+ Array.copy(b, 3, a, 3, 7)
+ assert(a.toSeq == List(0, 0, 0, 3, 4, 5, 6, 7, 8, 9))
+ }
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/files/run/arybufgrow.scala b/test/files/run/arybufgrow.scala
index 4dccd962f2..9e18435243 100644
--- a/test/files/run/arybufgrow.scala
+++ b/test/files/run/arybufgrow.scala
@@ -2,8 +2,8 @@ import scala.collection.mutable._;
object Test extends Application {
val buf = new ArrayBuffer[String];
- for(val i <- List.range(0,1000)) {
- buf + "hello";
+ for (i <- List.range(0,1000)) {
+ buf += "hello";
}
Console.println("1000 = " + buf.length);
diff --git a/test/files/run/bigDecimalCache.scala b/test/files/run/bigDecimalCache.scala
new file mode 100644
index 0000000000..c0c709a50f
--- /dev/null
+++ b/test/files/run/bigDecimalCache.scala
@@ -0,0 +1,9 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ val bd5a = BigDecimal(5)
+ val mc = java.math.MathContext.DECIMAL32
+ val bd5b = BigDecimal(5,mc)
+
+ assert(bd5b.mc == mc)
+ }
+}
diff --git a/test/files/run/bug1074.check b/test/files/run/bug1074.check
index ecff8510ed..ccf1cb1551 100644
--- a/test/files/run/bug1074.check
+++ b/test/files/run/bug1074.check
@@ -1,3 +1,3 @@
-q0 = Set(kl, jk, cd, fg, a, ef, gh, de, hj, b, lm, mn)
+q0 = Set(kl, jk, cd, fg, ef, gh, a, de, hj, b, lm, mn)
q1 = Set() 0
q2 = Set() 0
diff --git a/test/files/run/bug3126.scala b/test/files/run/bug3126.scala
new file mode 100644
index 0000000000..36322bf896
--- /dev/null
+++ b/test/files/run/bug3126.scala
@@ -0,0 +1,9 @@
+object Test {
+ case class C(x: Int)
+ val v: Some[Int] = null
+
+ def main(args: Array[String]): Unit = {
+ try C.unapply(null) catch { case _: MatchError => }
+ try v match { case Some(1) => } catch { case _: MatchError => }
+ }
+}
diff --git a/test/files/run/bug3175.check b/test/files/run/bug3175.check
new file mode 100644
index 0000000000..12246140f4
--- /dev/null
+++ b/test/files/run/bug3175.check
@@ -0,0 +1,11 @@
+10
+15
+3
+3
+3
+5
+5
+5
+100
+jabooboo
+hi mom
diff --git a/test/files/run/bug3175.scala b/test/files/run/bug3175.scala
new file mode 100644
index 0000000000..78660d4085
--- /dev/null
+++ b/test/files/run/bug3175.scala
@@ -0,0 +1,55 @@
+/** A bit down the road this test will examine
+ * the bytecode.
+ */
+object Test {
+ def len(x:{ def length: Int }) = x.length
+ def f1(x:{ def apply(x: Int): Long }) = x(0)
+ def f2(x:{ def apply(x: Int): Byte }) = x(0)
+ def f3(x:{ def apply(x: Int): String }) = x(0).length
+
+ def f4(x:{ def update(x: Int, y: Long): Unit }, y: Long) = x(0) = y
+ def f5(x:{ def update(x: Int, y: Byte): Unit }, y: Byte) = x(0) = y
+ def f6(x:{ def update(x: Int, y: String): Unit }, y: String) = x(0) = y
+
+ def f7(x: { def length: Any }) = x.length
+
+ def f8(x: { def apply(x: Int): Any }) = x(0)
+ def f9(x: { def apply(x: Int): Int }) = x(0)
+ def f10(x: { def apply(x: Int): Long }) = x(0)
+
+ // update has some interesting special cases
+ def f11(x:{ def update(x: Int, y: Long): Any }, y: Long) = x(0) = y
+ def f12(x:{ def update(x: Int, y: String): AnyVal }, y: String) = x(0) = y
+ def f13(x:{ def update(x: Int, y: String): AnyRef }, y: String) = x(0) = y
+
+ // doesn't work yet, see #3197
+ // def fclone(x:{ def clone(): AnyRef }) = x.clone()
+
+ def main(args: Array[String]): Unit = {
+ val longs = Array(5L)
+ val bytes = Array(5: Byte)
+ val strs = Array("abcde", "fghjij")
+
+ println(len(Array(1,2,3)) + len(Array(4.0,5.0f)) + len(Array("abc", 5)) + len("bop"))
+ println(f1(longs) + f2(bytes) + f3(strs))
+
+ f4(longs, 1)
+ f5(bytes, 1)
+ f6(strs, "a")
+
+ println(f1(longs) + f2(bytes) + f3(strs))
+
+ println(f7(Array(1,2,3)))
+ println(f7("def"))
+
+ println(f8(Array(5)))
+ println(f9(Array(5)))
+ println(f10(Array(5)))
+
+ f11(longs, 100L)
+ f12(strs, "jabooboo")
+ println(longs(0))
+ println(strs(0))
+ f13(new { def update(x: Int, y: String): List[Int] = { println("hi mom") ; Nil } }, "irrelevant")
+ }
+}
diff --git a/test/files/run/bug594.scala b/test/files/run/bug594.scala
index 0c3be3d5de..f923a3cd2a 100644
--- a/test/files/run/bug594.scala
+++ b/test/files/run/bug594.scala
@@ -2,7 +2,7 @@ object Test {
def main(args: Array[String]): Unit = {
val array = Array("one", "two", "three")
val firstTwo: Array[String] = array.slice(0,2)
- for(val x <- firstTwo)
+ for (x <- firstTwo)
Console.println(x)
}
}
diff --git a/test/files/run/bug751.scala b/test/files/run/bug751.scala
new file mode 100644
index 0000000000..294d3af5c2
--- /dev/null
+++ b/test/files/run/bug751.scala
@@ -0,0 +1,6 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ val map = Map(1 -> "a", 2 -> "b", 3 -> "c")
+ assert(map.filterKeys(_ % 2 == 0).isInstanceOf[scala.collection.immutable.Map[_,_]])
+ }
+}
diff --git a/test/files/run/bytecodecs.scala b/test/files/run/bytecodecs.scala
new file mode 100644
index 0000000000..bf8a0f8ed3
--- /dev/null
+++ b/test/files/run/bytecodecs.scala
@@ -0,0 +1,39 @@
+import scala.reflect.generic.ByteCodecs._
+
+object Test {
+
+ def test8to7(xs: Array[Byte]) {
+ val ys = encode8to7(xs)
+ decode7to8(ys, ys.length)
+ assert(ys.take(xs.length).deep == xs.deep,
+ "test8to7("+xs.deep+") failed, result = "+ys.take(xs.length).deep)
+ }
+
+ def testAll(xs: Array[Byte]) {
+ val ys = encode(xs)
+ decode(ys)
+ assert(ys.take(xs.length).deep == xs.deep,
+ "testAll("+xs.deep+") failed, result = "+ys.take(xs.length).deep)
+ }
+
+ def test(inputs: Array[Byte]*) {
+ for (input <- inputs) {
+ test8to7(input)
+ testAll(input)
+ }
+ }
+
+ def main(args: Array[String]) {
+ test(
+ Array(1, 2, 3),
+ Array(1, 2, 3, 4, 5, 6, 7),
+ Array(1, -2, 0, -3, -5, -6, -7),
+ Array(1, 3, -1, -128, 0, 0, -128, 1, 2, 3))
+ val rand = new scala.util.Random()
+ for (i <- 1 until 5000) {
+ var xs = new Array[Byte](i)
+ rand.nextBytes(xs)
+ test(xs)
+ }
+ }
+}
diff --git a/test/files/run/caseClassEquality.scala b/test/files/run/caseClassEquality.scala
new file mode 100644
index 0000000000..c11d7ad0d1
--- /dev/null
+++ b/test/files/run/caseClassEquality.scala
@@ -0,0 +1,36 @@
+object Test {
+ abstract class A1
+ case class C1(x: Int) extends A1
+ class C2(x: Int) extends C1(x) {
+ override def productPrefix = "Shazbot!"
+ }
+ class C3(x: Int) extends C1(x) {
+ override def canEqual(other: Any) = other.isInstanceOf[C3]
+ override def equals(other: Any) = other match {
+ case ob: C3 => x == ob.x
+ case _ => false
+ }
+ }
+
+ case class CS1(xs: Any*)
+ class CS2(xs: Seq[_]*) extends CS1(xs: _*)
+ class CS3(xs: IndexedSeq[Int]*) extends CS2(xs: _*)
+
+ case class H1(x: Int, y: Double)
+ class H2(x: Double, y: Int) extends H1(y, x)
+
+ def main(args: Array[String]): Unit = {
+ assert(C1(5) == new C2(5))
+ assert(new C2(5) == C1(5))
+ assert(C1(5).hashCode == new C2(5).hashCode)
+ assert(new C2(5).hashCode == C1(5).hashCode)
+
+ assert(C1(5) != new C3(5))
+ assert(new C3(5) != C1(5))
+
+ assert(CS1(List(1d,2d), Seq[Float](3f, 4f)) == new CS3(IndexedSeq(1,2), IndexedSeq(3, 4)))
+
+ assert(H1(5, 10d) == new H2(10d, 5))
+ assert(H1(5, 10d).hashCode == new H2(10d, 5).hashCode)
+ }
+}
diff --git a/test/files/run/colltest1.check b/test/files/run/colltest1.check
index b49d328be4..7377174281 100644
--- a/test/files/run/colltest1.check
+++ b/test/files/run/colltest1.check
@@ -72,11 +72,11 @@ new test starting with List()
9: List(2, 3, 4, 5, 6, 7, 8, 9, 10)
1
List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
-new test starting with IndexedSeq()
-10: IndexedSeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
-9: IndexedSeq(2, 3, 4, 5, 6, 7, 8, 9, 10)
+new test starting with Vector()
+10: Vector(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
+9: Vector(2, 3, 4, 5, 6, 7, 8, 9, 10)
1
-IndexedSeq(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
+Vector(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
new test starting with Vector()
10: Vector(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
9: Vector(2, 3, 4, 5, 6, 7, 8, 9, 10)
@@ -95,15 +95,15 @@ ArrayBuffer(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
true
false
true
-Map(O -> O, W -> W, H -> H, P -> P, G -> G, V -> V, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
-Map(O -> O, W -> W, H -> H, P -> P, G -> G, V -> V, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
-Map(O -> O, W -> W, H -> H, P -> P, V -> V, G -> G, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
-Map(O -> O, W -> W, H -> H, P -> P, V -> V, G -> G, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
-Map(O -> O, W -> W, H -> H, P -> P, G -> G, V -> V, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
-Map(O -> O, W -> W, H -> H, P -> P, G -> G, V -> V, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
-Map(A -> A, B -> B, C -> C, D -> D, E -> E, F -> F, G -> G, H -> H, I -> I, J -> J, K -> K, L -> L, M -> M, N -> N, O -> O, P -> P, Q -> Q, R -> R, S -> S, T -> T, U -> U, V -> V, W -> W, X -> X, Y -> Y, Z -> Z)
-Map(A -> A, B -> B, C -> C, D -> D, E -> E, F -> F, G -> G, H -> H, I -> I, J -> J, K -> K, L -> L, M -> M, N -> N, O -> O, P -> P, Q -> Q, R -> R, S -> S, T -> T, U -> U, V -> V, W -> W, X -> X, Y -> Y, Z -> Z)
-Map(O -> O, W -> W, H -> H, P -> P, V -> V, G -> G, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
-Map(O -> O, W -> W, H -> H, P -> P, V -> V, G -> G, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
-Map(O -> O, W -> W, H -> H, P -> P, G -> G, V -> V, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
-Map(O -> O, W -> W, H -> H, P -> P, G -> G, V -> V, I -> I, A -> A, F -> F, U -> U, N -> N, X -> X, Z -> Z, S -> S, D -> D, K -> K, R -> R, C -> C, B -> B, L -> L, Q -> Q, M -> M, J -> J, Y -> Y, T -> T, E -> E)
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
diff --git a/test/files/run/colltest1.scala b/test/files/run/colltest1.scala
index f12c234f74..557282cb8d 100644
--- a/test/files/run/colltest1.scala
+++ b/test/files/run/colltest1.scala
@@ -80,7 +80,7 @@ object Test extends Application {
val tenPlus = ten map (_ + 1)
assert((ten zip tenPlus) forall { case (x, y) => x + 1 == y })
val dble = ten flatMap (x => List(x, x))
- assert(dble.removeDuplicates == ten)
+ assert(dble.distinct == ten)
assert(ten.length == 10)
assert(ten(0) == 1 && ten(9) == 10)
assert((ten lengthCompare 10) == 0 && (ten lengthCompare 1) > 0 && (ten lengthCompare 11) < 0)
@@ -122,7 +122,7 @@ object Test extends Application {
assert((ten diff (ten filter (_ % 2 == 0))) == (ten filterNot (_ % 2 == 0)))
assert((ten intersect ten) == ten)
assert((ten intersect List(5)) == List(5))
- assert((ten ++ ten).removeDuplicates == ten)
+ assert((ten ++ ten).distinct == ten)
assert(ten.patch(3, List(4, 5, 6, 7), 4) == ten)
assert(ten.patch(0, List(1, 2, 3), 9) == List(1, 2, 3, 10))
assert(empty.padTo(10, 7) == Array.fill(10)(7).toSeq)
@@ -170,7 +170,7 @@ object Test extends Application {
m += (("D" -> "D"), ("E" -> "E"), ("F" -> "F"))
m ++= List(("G" -> "G"), ("H" -> "H"), ("I" -> "I"))
m ++= ('J' to 'Z') map (x => (x.toString -> x.toString))
- println(m)
+ println(m.toList.sorted)
assert(!m.isEmpty)
assert(m.keySet forall (k => (m get k) == Some(k)))
assert(m.keySet forall (k => (m apply k) == k))
@@ -185,7 +185,7 @@ object Test extends Application {
assert(mm.isEmpty, mm)
def m3 = empty ++ m1
assert(m1 == m3)
- println(m3)
+ println(m3.toList.sorted)
val m4 = m3 filterNot { case (k, v) => k != "A" }
assert(m4.size == 1, m4)
}
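The substitutions above follow the collections rename of removeDuplicates to distinct (behaviour unchanged) and sort the map printout so the check file no longer depends on hash ordering. A minimal sketch of the renamed method:

object DistinctSketch {
  def main(args: Array[String]): Unit = {
    val ten = (1 to 10).toList
    val dble = ten.flatMap(x => List(x, x))
    assert(dble.distinct == ten)            // formerly dble.removeDuplicates
    assert((ten ++ ten).distinct == ten)
  }
}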
diff --git a/test/files/run/constrained-types.check b/test/files/run/constrained-types.check
index f18ff6e3c9..cc00a7c46b 100644
--- a/test/files/run/constrained-types.check
+++ b/test/files/run/constrained-types.check
@@ -121,7 +121,7 @@ y: java.lang.String = hello
-----
val x = 3 : Int @Annot(e+f+g+h) //should have a graceful error message
-<console>:5: error: not found: value e
+<console>:6: error: not found: value e
val x = 3 : Int @Annot(e+f+g+h) //should have a graceful error message
^
diff --git a/test/files/run/elidable.check b/test/files/run/elidable.check
new file mode 100644
index 0000000000..4ce04f0040
--- /dev/null
+++ b/test/files/run/elidable.check
@@ -0,0 +1 @@
+Good for me, I was not elided.
diff --git a/test/files/run/elidable.flags b/test/files/run/elidable.flags
new file mode 100644
index 0000000000..93fd3d5317
--- /dev/null
+++ b/test/files/run/elidable.flags
@@ -0,0 +1 @@
+-Xelide-below 900
diff --git a/test/files/run/elidable.scala b/test/files/run/elidable.scala
new file mode 100644
index 0000000000..2a527efc4b
--- /dev/null
+++ b/test/files/run/elidable.scala
@@ -0,0 +1,16 @@
+import annotation._
+import elidable._
+
+object Test {
+ @elidable(FINEST) def f1() = assert(false, "Should have been elided.")
+ @elidable(INFO) def f2() = assert(false, "Should have been elided.")
+ @elidable(SEVERE) def f3() = println("Good for me, I was not elided.")
+ @elidable(INFO) def f4 = assert(false, "Should have been elided (no parens).")
+
+ def main(args: Array[String]): Unit = {
+ f1()
+ f2()
+ f3()
+ f4
+ }
+}
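The new elidable test depends on the interaction between @elidable levels and the -Xelide-below flag in elidable.flags: call sites of methods annotated with a level below the threshold are removed at compile time. A minimal sketch under that assumption (with the threshold 900 used above, FINE at 500 is dropped and SEVERE at 1000 survives):

import scala.annotation.elidable
import scala.annotation.elidable._

object ElidableSketch {
  @elidable(FINE)   def debug(msg: String): Unit = println("debug: " + msg)
  @elidable(SEVERE) def error(msg: String): Unit = println("error: " + msg)

  def main(args: Array[String]): Unit = {
    debug("compiled away under -Xelide-below 900")  // call site removed
    error("kept: SEVERE (1000) is not below 900")   // call site survives
  }
}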
diff --git a/test/files/run/equality.scala b/test/files/run/equality.scala
index 5b9ad207da..6498b232e1 100644
--- a/test/files/run/equality.scala
+++ b/test/files/run/equality.scala
@@ -1,6 +1,8 @@
// a quickly assembled test of equality. Needs work.
object Test
{
+ import scala.runtime.ScalaRunTime.hash
+
def makeFromInt(x: Int) = List(
x.toByte, x.toShort, x.toInt, x.toLong, x.toFloat, x.toDouble, BigInt(x), BigDecimal(x)
) ::: (
diff --git a/test/files/run/hashCodeBoxesRunTime.scala b/test/files/run/hashCodeBoxesRunTime.scala
index 3eacacb663..ba1a30f5fb 100644
--- a/test/files/run/hashCodeBoxesRunTime.scala
+++ b/test/files/run/hashCodeBoxesRunTime.scala
@@ -5,7 +5,7 @@ object Test
import java.{ lang => jl }
import scala.runtime.BoxesRunTime.{ hashFromNumber, hashFromObject }
- def allSame[T](xs: List[T]) = assert(xs.removeDuplicates.size == 1, "failed: " + xs)
+ def allSame[T](xs: List[T]) = assert(xs.distinct.size == 1, "failed: " + xs)
def mkNumbers(x: Int): List[Number] =
List(x.toByte, x.toShort, x, x.toLong, x.toFloat, x.toDouble)
diff --git a/test/files/run/hashCodeDistribution.scala b/test/files/run/hashCodeDistribution.scala
index dbb6e833bd..5be9d1db6d 100644
--- a/test/files/run/hashCodeDistribution.scala
+++ b/test/files/run/hashCodeDistribution.scala
@@ -8,7 +8,7 @@ object Test {
val hashCodes =
for (x <- 0 until COUNT; y <- 0 until COUNT) yield C(x,y).hashCode
- val uniques = hashCodes.removeDuplicates
+ val uniques = hashCodes.distinct
val collisionRate = (totalCodes - uniques.size) * 1000 / totalCodes
assert(collisionRate < 5, "Collision rate too high: %d / 1000".format(collisionRate))
diff --git a/test/files/run/hashhash.scala b/test/files/run/hashhash.scala
new file mode 100644
index 0000000000..4a34ab12e0
--- /dev/null
+++ b/test/files/run/hashhash.scala
@@ -0,0 +1,15 @@
+object Test
+{
+ class A { val x1 = this.## ; val x2 = super.## }
+ val myA = new A
+ assert(myA.x1 == myA.x2)
+
+ def confirmSame(x: Any) = assert(x.## == x.hashCode, "%s.## != %s.hashCode".format(x, x))
+ def confirmDifferent(x: Any) = assert(x.## != x.hashCode, "%s.## == %s.hashCode (but should not)".format(x, x))
+
+ def main(args: Array[String]): Unit = {
+ /** Just a little sanity check, not to be confused with a unit test. */
+ List(5, 5.5f, "abc", new AnyRef, new A, ()) foreach confirmSame
+ List(5.0f, 1.0d, -(5.0f), (-1.0d)) foreach confirmDifferent
+ }
+}
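The hashhash test exercises the then-new ## operator, which hashes boxed numerics consistently with ==, so equal numbers of different widths share a hash; plain hashCode does not guarantee that. A minimal sketch of the property:

object HashHashSketch {
  def main(args: Array[String]): Unit = {
    val same: List[Any] = List(5, 5L, 5.0, 5.0f)
    assert(same.map(_.##).distinct.size == 1)        // ## agrees across widths
    assert((5.0: Any).hashCode != (5: Any).hashCode) // hashCode does not
  }
}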
diff --git a/test/files/run/iterators.check b/test/files/run/iterators.check
index cd1b79144a..bb139c1610 100644
--- a/test/files/run/iterators.check
+++ b/test/files/run/iterators.check
@@ -7,7 +7,7 @@ test check_drop was successful
test check_foreach was successful
test check_forall was successful
test check_fromArray was successful
-test check_collect was successful
+test check_toSeq was successful
test check_indexOf was successful
test check_findIndexOf was successful
diff --git a/test/files/run/iterators.scala b/test/files/run/iterators.scala
index 5879d38df3..5f77289343 100644
--- a/test/files/run/iterators.scala
+++ b/test/files/run/iterators.scala
@@ -83,8 +83,8 @@ object Test {
xs0.length + xs1.length + xs2.length + xs3.length + xs4.length
}
- def check_collect: String =
- List(1, 2, 3, 4, 5).iterator.collect.mkString("x")
+ def check_toSeq: String =
+ List(1, 2, 3, 4, 5).iterator.toSeq.mkString("x")
def check_indexOf: String = {
val i = List(1, 2, 3, 4, 5).indexOf(4)
@@ -93,8 +93,8 @@ object Test {
}
def check_findIndexOf: String = {
- val i = List(1, 2, 3, 4, 5).findIndexOf { x: Int => x >= 4 }
- val j = List(1, 2, 3, 4, 5).findIndexOf { x: Int => x >= 16 }
+ val i = List(1, 2, 3, 4, 5).indexWhere { x: Int => x >= 4 }
+ val j = List(1, 2, 3, 4, 5).indexWhere { x: Int => x >= 16 }
"" + i + "x" + j
}
@@ -124,7 +124,7 @@ object Test {
check_success("check_foreach", check_foreach, 190)
check_success("check_forall", check_forall, 0)
check_success("check_fromArray",check_fromArray, 14)
- check_success("check_collect", check_collect, "1x2x3x4x5")
+ check_success("check_toSeq", check_toSeq, "1x2x3x4x5")
check_success("check_indexOf", check_indexOf, "3x-1")
check_success("check_findIndexOf", check_findIndexOf, "3x-1")
println()
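The iterator hunks track two API moves: Iterator.collect (in its old "collect into a sequence" sense) became toSeq, and findIndexOf on sequences became indexWhere. A small sketch of the replacements:

object IteratorRenameSketch {
  def main(args: Array[String]): Unit = {
    assert(List(1, 2, 3).iterator.toSeq.mkString("x") == "1x2x3")
    assert(List(1, 2, 3, 4, 5).indexWhere(_ >= 4) == 3)
    assert(List(1, 2, 3, 4, 5).indexWhere(_ >= 16) == -1)
  }
}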
diff --git a/test/files/run/lists.scala b/test/files/run/lists.scala
index 695d5a0336..6c9b55961e 100644
--- a/test/files/run/lists.scala
+++ b/test/files/run/lists.scala
@@ -126,8 +126,8 @@ object Test1 extends TestCase("ctor") with Assert {
assertEquals("check_forall", true, b1 & b2)
}
{
- val ys1 = xs1 remove { e => e % 2 != 0 }
- val ys2 = xs4 remove { e => e < 5 }
+ val ys1 = xs1 filterNot { e => e % 2 != 0 }
+ val ys2 = xs4 filterNot { e => e < 5 }
assertEquals("check_remove", 3, ys1.length + ys2.length)
}
{
diff --git a/test/files/run/names-defaults.check b/test/files/run/names-defaults.check
index 47d09a5083..60c7637e3d 100644
--- a/test/files/run/names-defaults.check
+++ b/test/files/run/names-defaults.check
@@ -81,6 +81,7 @@ get: 20
get: 20
20
0
+1
dlkfj0dlkfj102
lskf2dkflj2
dlkd5nixda10nixdadklfj1dklfj
@@ -119,3 +120,4 @@ klfj1
blublu1
my text
List(1, 2)
+3
diff --git a/test/files/run/names-defaults.scala b/test/files/run/names-defaults.scala
index 121ddcb3bc..8557047875 100644
--- a/test/files/run/names-defaults.scala
+++ b/test/files/run/names-defaults.scala
@@ -135,6 +135,10 @@ object Test extends Application {
println(bn4())
println(bn4(a = 0))
+ class t2929(x: => Int = 1) {
+ def foo = x
+ }
+ println((new t2929()).foo)
// constructors
val a1 = new A(b = "dlkfj")(d = 102)
@@ -252,6 +256,9 @@ object Test extends Application {
def test11[T[P]](x: T[T[List[T[X forSome { type X }]]]] = List(1,2)) = x
// (cannot call f using the default, List(1,2) doesn't match the param type)
+ def multinest = { def bar(x: Int = 1) = { def bar(x: Int = 2) = x; bar() + x }; bar() }
+ println(multinest)
+
// #2290
def spawn(a: Int, b: => Unit) = { () }
@@ -289,6 +296,14 @@ object Test extends Application {
class C extends A
}
+ object t3178 {
+ def foo(x: String) = x
+ def foo(x: Int) = x
+ def bar(foo: Int) = foo
+ bar(foo = 1)
+ }
+
+
// DEFINITIONS
def test1(a: Int, b: String) = println(a +": "+ b)
def test2(u: Int, v: Int)(k: String, l: Int) = println(l +": "+ k +", "+ (u + v))
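Among the additions above, t2929 checks that a default argument is accepted for a by-name parameter; like any by-name argument, the default expression is then re-evaluated at each use. A minimal sketch of that behaviour (hypothetical names):

object ByNameDefaultSketch {
  var evaluations = 0
  def twice(x: => Int = { evaluations += 1; 1 }) = x + x

  def main(args: Array[String]): Unit = {
    assert(twice() == 2)
    assert(evaluations == 2)   // the default thunk ran once per use of x
  }
}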
diff --git a/test/files/run/programmatic-main.check b/test/files/run/programmatic-main.check
new file mode 100644
index 0000000000..82118f8ece
--- /dev/null
+++ b/test/files/run/programmatic-main.check
@@ -0,0 +1,26 @@
+parser
+namer
+packageobjects
+typer
+superaccessors
+pickler
+refchecks
+selectiveanf
+liftcode
+selectivecps
+uncurry
+tailcalls
+explicitouter
+erasure
+lazyvals
+lambdalift
+constructors
+flatten
+mixin
+cleanup
+icode
+inliner
+closelim
+dce
+jvm
+terminal
diff --git a/test/files/run/programmatic-main.scala b/test/files/run/programmatic-main.scala
new file mode 100644
index 0000000000..9c7d0b7095
--- /dev/null
+++ b/test/files/run/programmatic-main.scala
@@ -0,0 +1,12 @@
+import scala.tools.nsc._
+import io.Path
+
+object Test {
+ val cwd = Option(System.getProperty("partest.cwd")) getOrElse "."
+ val basedir = Path(cwd).parent / "lib" path
+ val baseargs = Array("-usejavacp", "-bootclasspath", basedir + "/scala-library.jar", "-cp", basedir + "/scala-compiler.jar")
+
+ def main(args: Array[String]): Unit = {
+ Main process (baseargs ++ Array("-Xshow-phases"))
+ }
+}
diff --git a/test/files/run/range.scala b/test/files/run/range.scala
index 65d33a6134..02b48fad7c 100644
--- a/test/files/run/range.scala
+++ b/test/files/run/range.scala
@@ -44,6 +44,9 @@ object Test {
assert(NumericRange.inclusive(1, 10, 1) sameElements (1 to 10))
assert(NumericRange.inclusive(1, 100, 3) sameElements (1 to 100 by 3))
+ // #2518
+ assert((3L to 7 by 2) sameElements List(3L, 5L, 7L))
+
rangeForeach(1 to 10);
rangeForeach(1 until 10);
rangeForeach(10 to 1 by -1);
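The #2518 assertion above mixes a Long start with Int literals; the widened result is a NumericRange[Long]. A small sketch of the same construction:

object LongRangeSketch {
  def main(args: Array[String]): Unit = {
    val r = 3L to 7 by 2               // NumericRange.inclusive(3L, 7L, 2L)
    assert(r.toList == List(3L, 5L, 7L))
    assert((1L until 10L by 3).toList == List(1L, 4L, 7L))
  }
}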
diff --git a/test/files/run/regularpatmat.check b/test/files/run/regularpatmat.check
deleted file mode 100644
index 3417d9a98a..0000000000
--- a/test/files/run/regularpatmat.check
+++ /dev/null
@@ -1,126 +0,0 @@
-pretest
-passed ok
-testWR_1
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testWR_2
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testWR_3
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testWR_4
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testWR_5
-passed ok
-passed ok
-passed ok
-testWR_6
-passed ok
-passed ok
-testWR_7
-passed ok
-testWR_8
-passed ok
-testWS
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testWT
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testWV
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testBK
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testBM
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-BN preTest: true
-testBN
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testBO
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-testMZ - bugs #132 #133b #180 #195 #196 #398 #406 #441
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
-passed ok
diff --git a/test/files/run/spec-absfun.flags b/test/files/run/spec-absfun.flags
new file mode 100644
index 0000000000..3a910a936c
--- /dev/null
+++ b/test/files/run/spec-absfun.flags
@@ -0,0 +1 @@
+-Yspecialize \ No newline at end of file
diff --git a/test/files/run/spec-absfun.scala b/test/files/run/spec-absfun.scala
new file mode 100644
index 0000000000..ab16e8febc
--- /dev/null
+++ b/test/files/run/spec-absfun.scala
@@ -0,0 +1,43 @@
+
+/** Test inheritance. See #3085.
+ * Anonymous functions extend AbstractFunction1[SpecializedPair[Int], Unit]. The
+ * specialized type SpecializedPair$mcI$sp should not leak into the superclass because
+ * the definition of apply would vary covariantly, and erasure won't consider it an
+ * override of the abstract apply, leading to an AbstractMethodError at runtime.
+ */
+
+object Test {
+
+ private val Max = 1000
+
+ def main(args: Array[String]) {
+ notSpecialized()
+ specialized()
+ }
+
+ def notSpecialized() {
+ val pairs = for { i <- 1 to Max; j <- 1 to i } yield new Pair(i, j)
+ val time0 = System.nanoTime
+ pairs foreach { p => p.first * p.second }
+ val time1 = System.nanoTime
+// println(time1 - time0)
+ }
+
+ def specialized() {
+ val pairs = for { i <- 1 to Max; j <- 1 to i } yield new SpecializedPair(i, j)
+ val time0 = System.nanoTime
+ pairs foreach { p => p.first * p.second }
+ val time1 = System.nanoTime
+// println(time1 - time0)
+ }
+}
+
+class Pair[A](_first: A, _second: A) {
+ def first = _first
+ def second = _second
+}
+
+class SpecializedPair[@specialized("Int") A](_first: A, _second: A) {
+ def first = _first
+ def second = _second
+}
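The spec-* tests are compiled with -Yspecialize (see the accompanying .flags files); @specialized asks the compiler to generate primitive variants of a generic class so that hot paths avoid boxing. A minimal sketch of the annotation, not tied to the regression itself:

class Cell[@specialized A](val value: A) {
  def mapped[@specialized B](f: A => B): Cell[B] = new Cell(f(value))
}

object SpecializedSketch {
  def main(args: Array[String]): Unit = {
    val c = new Cell(21)               // picks the Int-specialized variant
    assert(c.mapped(_ * 2).value == 42)
  }
}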
diff --git a/test/files/run/spec-matrix.check b/test/files/run/spec-matrix.check
new file mode 100644
index 0000000000..72e8ffc0db
--- /dev/null
+++ b/test/files/run/spec-matrix.check
@@ -0,0 +1 @@
+*
diff --git a/test/files/run/spec-matrix.flags b/test/files/run/spec-matrix.flags
new file mode 100644
index 0000000000..3a910a936c
--- /dev/null
+++ b/test/files/run/spec-matrix.flags
@@ -0,0 +1 @@
+-Yspecialize \ No newline at end of file
diff --git a/test/files/run/spec-matrix.scala b/test/files/run/spec-matrix.scala
new file mode 100644
index 0000000000..212a800672
--- /dev/null
+++ b/test/files/run/spec-matrix.scala
@@ -0,0 +1,70 @@
+/** Test matrix multiplication with specialization.
+ */
+
+class Matrix[@specialized A: ClassManifest](val rows: Int, val cols: Int) {
+ private val arr: Array[Array[A]] = new Array[Array[A]](rows, cols)
+
+ def apply(i: Int, j: Int): A = {
+ if (i < 0 || i >= rows || j < 0 || j >= cols)
+ throw new NoSuchElementException("Indexes out of bounds: " + (i, j))
+
+ arr(i)(j)
+ }
+
+ def update(i: Int, j: Int, e: A) {
+ arr(i)(j) = e
+ }
+
+ def rowsIterator: Iterator[Array[A]] = new Iterator[Array[A]] {
+ var idx = 0;
+ def hasNext = idx < rows
+ def next = {
+ idx += 1
+ arr(idx - 1)
+ }
+ }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ val m = randomMatrix(200, 100)
+ val n = randomMatrix(100, 200)
+
+ mult(m, n)
+ println("*")
+ }
+
+ def randomMatrix(n: Int, m: Int) = {
+ val r = new util.Random(10)
+ val x = new Matrix[Int](n, m)
+ for (i <- 0 until n; j <- 0 until m)
+ x(i, j) = r.nextInt
+ x
+ }
+
+
+ def multManifest[@specialized("Int") T](m: Matrix[T], n: Matrix[T])(implicit cm: ClassManifest[T], num: Numeric[T]) {
+ val p = new Matrix[T](m.rows, n.cols)
+ import num._
+
+ for (i <- 0 until m.rows)
+ for (j <- 0 until n.cols) {
+ var sum = num.zero
+ for (k <- 0 until n.rows)
+ sum += m(i, k) * n(k, j)
+ p(i, j) = sum
+ }
+ }
+
+ def mult(m: Matrix[Int], n: Matrix[Int]) {
+ val p = new Matrix[Int](m.rows, n.cols)
+
+ for (i <- 0 until m.rows)
+ for (j <- 0 until n.cols) {
+ var sum = 0
+ for (k <- 0 until n.rows)
+ sum += m(i, k) * n(k, j)
+ p(i, j) = sum
+ }
+ }
+}
diff --git a/test/files/run/spec-patmatch.check b/test/files/run/spec-patmatch.check
new file mode 100644
index 0000000000..a6679fa1c7
--- /dev/null
+++ b/test/files/run/spec-patmatch.check
@@ -0,0 +1,19 @@
+bool
+byte
+short
+char
+int
+long
+double
+float
+default
+object instantiations:
+bool
+byte
+short
+char
+int
+long
+double
+float
+default
diff --git a/test/files/run/spec-patmatch.flags b/test/files/run/spec-patmatch.flags
new file mode 100644
index 0000000000..3a910a936c
--- /dev/null
+++ b/test/files/run/spec-patmatch.flags
@@ -0,0 +1 @@
+-Yspecialize \ No newline at end of file
diff --git a/test/files/run/spec-patmatch.scala b/test/files/run/spec-patmatch.scala
new file mode 100644
index 0000000000..92938836d8
--- /dev/null
+++ b/test/files/run/spec-patmatch.scala
@@ -0,0 +1,52 @@
+class Foo[@specialized A] {
+ def test(x: A) = println(x match {
+ case _: Boolean => "bool"
+ case _: Byte => "byte"
+ case _: Short => "short"
+ case _: Char => "char"
+ case i: Int => "int"
+ case l: Long => "long"
+ case d: Double => "double"
+ case e: Float => "float"
+ case _ => "default"
+ })
+}
+
+object Test {
+ def test[@specialized A] (x: A) = println(x match {
+ case _: Boolean => "bool"
+ case _: Byte => "byte"
+ case _: Short => "short"
+ case _: Char => "char"
+ case i: Int => "int"
+ case l: Long => "long"
+ case d: Double => "double"
+ case e: Float => "float"
+ case _ => "default"
+ })
+
+ def main(args: Array[String]) {
+ test(true)
+ test(42.toByte)
+ test(42.toShort)
+ test('b')
+ test(42)
+ test(42l)
+ test(42.0)
+ test(42.0f)
+ test(new Object)
+
+ println("object instantiations:")
+ (new Foo).test(true)
+ (new Foo).test(42.toByte)
+ (new Foo).test(42.toShort)
+ (new Foo).test('b')
+ (new Foo).test(42)
+ (new Foo).test(42l)
+ (new Foo).test(42.0)
+ (new Foo).test(42.0f)
+ (new Foo).test(new Object)
+
+ }
+
+}
diff --git a/test/files/run/t0017.check b/test/files/run/t0017.check
index 86c5fe56a8..3a72142467 100644
--- a/test/files/run/t0017.check
+++ b/test/files/run/t0017.check
@@ -1 +1 @@
-Array(GenericArray(1, 3), GenericArray(2, 4))
+Array(ArraySeq(1, 3), ArraySeq(2, 4))
diff --git a/test/files/run/t0528.scala b/test/files/run/t0528.scala
index 5a4755c947..df6d13a17a 100644
--- a/test/files/run/t0528.scala
+++ b/test/files/run/t0528.scala
@@ -3,7 +3,7 @@ trait Sequ[A] {
}
class RichStr extends Sequ[Char] {
- // override to a primitve array
+ // override to a primitive array
def toArray: Array[Char] = Array('0', '1', '2', '3', '4', '5', '6', '7', '8', '9')
}
diff --git a/test/files/run/t1500.scala b/test/files/run/t1500.scala
index de79b84e75..ce6278cd05 100644
--- a/test/files/run/t1500.scala
+++ b/test/files/run/t1500.scala
@@ -18,10 +18,10 @@ object Test {
def main(args: Array[String]) = {
- val tool = new Interpreter(new Settings())
+ val settings = new Settings()
+ settings.classpath.value = System.getProperty("java.class.path")
+ val tool = new Interpreter(settings)
val global = tool.compiler
- // when running that compiler, give it a scala-library to the classpath
- global.settings.classpath.value = System.getProperty("java.class.path")
import global._
import definitions._
diff --git a/test/files/run/t1501.scala b/test/files/run/t1501.scala
index 851daae0bb..f1eb8f287d 100644
--- a/test/files/run/t1501.scala
+++ b/test/files/run/t1501.scala
@@ -28,12 +28,10 @@ object Test {
</code>.text
def main(args: Array[String]) = {
-
- val tool = new Interpreter(new Settings())
+ val settings = new Settings()
+ settings.classpath.value = System.getProperty("java.class.path")
+ val tool = new Interpreter(settings)
val global = tool.compiler
- // when running that compiler, give it a scala-library to the classpath
- global.settings.classpath.value = System.getProperty("java.class.path")
-
import global._
import definitions._
diff --git a/test/files/run/t1773.scala b/test/files/run/t1773.scala
index 81c6005f86..4a4aaba1f0 100644
--- a/test/files/run/t1773.scala
+++ b/test/files/run/t1773.scala
@@ -8,5 +8,5 @@ object Test extends Application
<a>{ if (true) "" else "I like turtles" }</a>
)
- for (x1 <- xs; x2 <- xs) assert (x1 == x2)
+ for (x1 <- xs; x2 <- xs) assert (x1 xml_== x2)
}
diff --git a/test/files/run/t2074.scala b/test/files/run/t2074.scala
deleted file mode 100644
index 60646be733..0000000000
--- a/test/files/run/t2074.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object Test extends Application {
- List.range(1,11).view.patch(5, List(100,101), 2)
-}
diff --git a/test/files/run/t2074_2.check b/test/files/run/t2074_2.check
index eb1f072de3..e4fc89b1ce 100644
--- a/test/files/run/t2074_2.check
+++ b/test/files/run/t2074_2.check
@@ -1,3 +1,3 @@
-IndexedSeqView(1, 2, 3)
-IndexedSeqView(1, 2, 3)
-IndexedSeqViewZ((1,1), (2,2), (3,3))
+SeqView(1, 2, 3)
+SeqView(1, 2, 3)
+SeqViewZ(...)
diff --git a/test/files/run/t2074_2.scala b/test/files/run/t2074_2.scala
index 7d1d8181d8..4624170f89 100644
--- a/test/files/run/t2074_2.scala
+++ b/test/files/run/t2074_2.scala
@@ -1,12 +1,15 @@
// replaced all occurrences of 'Vector' with 'IndexedSeq'
import scala.collection.immutable.IndexedSeq
-import scala.collection.IndexedSeqView
+import scala.collection.SeqView
object Test {
- val v = new IndexedSeqView[Int, IndexedSeq[Int]] {
+ val funWithCCE = List.range(1,11).view.patch(5, List(100,101), 2)
+
+ val v = new SeqView[Int, IndexedSeq[Int]] {
def underlying = IndexedSeq(1,2,3)
def apply(idx: Int) = underlying(idx)
def length = underlying.length
+ def iterator = underlying.iterator
}
val w = IndexedSeq(1, 2, 3).view
diff --git a/test/files/run/t2212.scala b/test/files/run/t2212.scala
new file mode 100644
index 0000000000..b9c0cd776b
--- /dev/null
+++ b/test/files/run/t2212.scala
@@ -0,0 +1,10 @@
+object Test {
+ def main(args: Array[String]) {
+ import collection.mutable._
+ val x4 = LinkedList[Int](1)
+ println(x4)
+ val y4 = LinkedList[Int](1)
+ println(y4)
+ println(x4 equals y4) // or (y4 equals x4)
+ }
+}
diff --git a/test/files/run/t2417.check b/test/files/run/t2417.check
new file mode 100644
index 0000000000..36c954be24
--- /dev/null
+++ b/test/files/run/t2417.check
@@ -0,0 +1,12 @@
+testing small Map that doesn't promote to HashMap...
+
+testing single-threaded HashMap use...
+
+testing HashMap.size from multiple threads...
+
+testing small Set that doesn't promote to HashSet...
+
+testing single-threaded HashSet use...
+
+testing HashSet.size from multiple threads...
+
diff --git a/test/files/run/t2417.scala b/test/files/run/t2417.scala
new file mode 100644
index 0000000000..31d4c143fa
--- /dev/null
+++ b/test/files/run/t2417.scala
@@ -0,0 +1,77 @@
+// #2417
+object Test {
+
+ def parallel(numThreads: Int)(block: => Unit) {
+ var failure: Throwable = null
+ val threads = Array.fromFunction(i => new Thread {
+ override def run {
+ try {
+ block
+ } catch {
+ case x => failure = x
+ }
+ }
+ })(numThreads)
+ for (t <- threads) t.start
+ for (t <- threads) t.join
+ if (failure != null) println("FAILURE: " + failure)
+ }
+
+ def testSet(initialSize: Int, numThreads: Int, passes: Int) {
+ val orig = Set.empty ++ (1 to initialSize)
+ parallel(numThreads) {
+ for (pass <- 0 until passes) {
+ var s = orig
+ for (e <- (initialSize to 1 by -1)) {
+ s -= e
+ val obs = s.size
+ if (obs != e - 1) {
+ throw new Exception("removed e=" + e + ", size was " + obs + ", s=" + s)
+ }
+ }
+ }
+ }
+ }
+
+ def testMap(initialSize: Int, numThreads: Int, passes: Int) {
+ val orig = Map.empty ++ ((1 to initialSize) map ((_,"v")))
+ parallel(numThreads) {
+ for (pass <- 0 until passes) {
+ var m = orig
+ for (e <- (initialSize to 1 by -1)) {
+ m -= e
+ val obs = m.size
+ if (obs != e - 1) {
+ throw new Exception("removed e=" + e + ", size was " + obs + ", m=" + m)
+ }
+ }
+ }
+ }
+ }
+
+ def main(args: Array[String]) {
+ println("testing small Map that doesn't promote to HashMap...")
+ testMap(4, 2, 1000000)
+ println()
+
+ println("testing single-threaded HashMap use...")
+ testMap(5, 1, 1000000)
+ println()
+
+ println("testing HashMap.size from multiple threads...")
+ testMap(5, 2, 1000000)
+ println()
+
+ println("testing small Set that doesn't promote to HashSet...")
+ testSet(4, 2, 1000000)
+ println()
+
+ println("testing single-threaded HashSet use...")
+ testSet(5, 1, 1000000)
+ println()
+
+ println("testing HashSet.size from multiple threads...")
+ testSet(5, 2, 1000000)
+ println()
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t2526.scala b/test/files/run/t2526.scala
index 5f6d60546a..53f3059135 100644
--- a/test/files/run/t2526.scala
+++ b/test/files/run/t2526.scala
@@ -20,9 +20,8 @@ object Test {
assertForeach(keys, m.keysIterator)
assertForeach(keys, m.keySet)
- assertForeach(values, m.valuesIterable.iterator)
+ assertForeach(values, m.values.iterator)
assertForeach(values, m.valuesIterator)
- assertForeach(values, m.valuesIterable)
assertForeach(entries, m)
}
@@ -39,7 +38,7 @@ object Test {
/*
* Checks foreach of `actual` goes over all the elements in `expected`
- * We duplicate the method above because there is no common inteface between Traverable and
+ * We duplicate the method above because there is no common inteface between Traversable and
* Iterator and we want to avoid converting between collections to ensure that we test what
* we mean to test.
*/
diff --git a/test/files/run/t2867.scala b/test/files/run/t2867.scala
new file mode 100644
index 0000000000..25e55eaecd
--- /dev/null
+++ b/test/files/run/t2867.scala
@@ -0,0 +1,15 @@
+object Test {
+ case class A(l: List[_]*)
+
+ def main(args: Array[String]): Unit = {
+ /** Kind of sneaking a slightly different test in here as well as
+ * testing 2867. How subversive.
+ */
+ val xs1 = List(1, 2, 3)
+ val xs2 = List(1.0, 2.0, 3.0)
+ val xs3 = List[Any](1.0f, 2.0f, 3.0f)
+ val xs4 = List[Byte](1, 2, 3)
+
+ assert(A(List(xs1, xs2)) == A(List(xs3, xs4)))
+ }
+}
diff --git a/test/files/run/t2886.check b/test/files/run/t2886.check
new file mode 100644
index 0000000000..39ee46a3df
--- /dev/null
+++ b/test/files/run/t2886.check
@@ -0,0 +1 @@
+Function(List(LocalValue(NoSymbol,x,PrefixedType(SingleType(ThisType(Class(scala)),Field(scala.Predef,PrefixedType(ThisType(Class(scala)),Class(scala.Predef)))),TypeField(scala.Predef.String,PrefixedType(ThisType(Class(java.lang)),Class(java.lang.String)))))),Block(List(ValDef(LocalValue(NoSymbol,x$1,NoType),Ident(LocalValue(NoSymbol,x,PrefixedType(SingleType(ThisType(Class(scala)),Field(scala.Predef,PrefixedType(ThisType(Class(scala)),Class(scala.Predef)))),TypeField(scala.Predef.String,PrefixedType(ThisType(Class(java.lang)),Class(java.lang.String))))))), ValDef(LocalValue(NoSymbol,x$2,NoType),Ident(LocalValue(NoSymbol,x,PrefixedType(SingleType(ThisType(Class(scala)),Field(scala.Predef,PrefixedType(ThisType(Class(scala)),Class(scala.Predef)))),TypeField(scala.Predef.String,PrefixedType(ThisType(Class(java.lang)),Class(java.lang.String)))))))),Apply(Select(This(Class(Test)),Method(Test.test,MethodType(List(LocalValue(NoSymbol,name,PrefixedType(SingleType(ThisType(Class(scala)),Field(scala.Predef,PrefixedType(ThisType(Class(scala)),Class(scala.Predef)))),TypeField(scala.Predef.String,PrefixedType(ThisType(Class(java.lang)),Class(java.lang.String))))), LocalValue(NoSymbol,address,PrefixedType(SingleType(ThisType(Class(scala)),Field(scala.Predef,PrefixedType(ThisType(Class(scala)),Class(scala.Predef)))),TypeField(scala.Predef.String,PrefixedType(ThisType(Class(java.lang)),Class(java.lang.String)))))),PrefixedType(ThisType(Class(scala)),Class(scala.Null))))),List(Ident(LocalValue(NoSymbol,x$2,NoType)), Ident(LocalValue(NoSymbol,x$1,NoType)))))) \ No newline at end of file
diff --git a/test/files/run/t2886.scala b/test/files/run/t2886.scala
new file mode 100644
index 0000000000..eb392f0c58
--- /dev/null
+++ b/test/files/run/t2886.scala
@@ -0,0 +1,7 @@
+object Test {
+ def test(name: String, address: String) = null
+ def main(args: Array[String]) = {
+ val tree = scala.reflect.Code.lift((x:String) => test(address=x,name=x)).tree
+ println(tree)
+ }
+}
diff --git a/test/files/run/t2946/Parsers.scala b/test/files/run/t2946/Parsers.scala
new file mode 100644
index 0000000000..c0961034c4
--- /dev/null
+++ b/test/files/run/t2946/Parsers.scala
@@ -0,0 +1,4 @@
+class Parser {
+ def parse(t: Any): Unit = {
+ }
+}
diff --git a/test/files/run/t2946/ResponseCommon.scala b/test/files/run/t2946/ResponseCommon.scala
new file mode 100644
index 0000000000..fa9d8acccb
--- /dev/null
+++ b/test/files/run/t2946/ResponseCommon.scala
@@ -0,0 +1,14 @@
+trait ResponseCommon extends Parser {
+ private[this] var paramsParser: Parser = null
+ def withParamsParser(parser: Parser) = {paramsParser = parser; this}
+
+ class Foo {
+ println(paramsParser)
+ }
+
+ override abstract def parse(t: Any): Unit = t match {
+ case ("params", value: List[_]) => value.foreach {paramsParser.parse(_)}
+ case _ => super.parse(t)
+ }
+}
+
diff --git a/test/files/run/t2946/Test.scala b/test/files/run/t2946/Test.scala
new file mode 100644
index 0000000000..e9d9896a0e
--- /dev/null
+++ b/test/files/run/t2946/Test.scala
@@ -0,0 +1,7 @@
+class Test extends Parser with ResponseCommon
+
+object Test {
+ def main(args: Array[String]) {
+ new Test
+ }
+}
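t2946 compiles a stackable trait: ResponseCommon declares parse as override abstract and chains to super, which is only resolved when the trait is mixed over a concrete Parser. A minimal sketch of the same pattern with hypothetical names:

object StackableSketch {
  class Handler { def parse(t: Any): Unit = () }

  trait Tracing extends Handler {
    abstract override def parse(t: Any): Unit = {
      println("parsing: " + t)
      super.parse(t)          // bound at mix-in time, here to Handler.parse
    }
  }

  def main(args: Array[String]): Unit = {
    val h = new Handler with Tracing
    h.parse("hello")          // prints "parsing: hello", then delegates
  }
}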
diff --git a/test/files/run/t3026.check b/test/files/run/t3026.check
new file mode 100644
index 0000000000..8c29b615fa
--- /dev/null
+++ b/test/files/run/t3026.check
@@ -0,0 +1,2 @@
+RED
+YELLOW
diff --git a/test/files/run/t3026.scala b/test/files/run/t3026.scala
new file mode 100755
index 0000000000..22dde9cc03
--- /dev/null
+++ b/test/files/run/t3026.scala
@@ -0,0 +1,8 @@
+object Test {
+ abstract class Colour
+ case object RED extends Colour
+ case object YELLOW extends Colour
+ val items = Array(RED, YELLOW)
+
+ def main(args: Array[String]): Unit = items foreach println
+}
diff --git a/test/files/run/t3112.check b/test/files/run/t3112.check
new file mode 100644
index 0000000000..a95644c82c
--- /dev/null
+++ b/test/files/run/t3112.check
@@ -0,0 +1,4 @@
+Vector()
+Vector()
+Vector()
+Vector() \ No newline at end of file
diff --git a/test/files/run/t3112.scala b/test/files/run/t3112.scala
new file mode 100644
index 0000000000..eb8eec6327
--- /dev/null
+++ b/test/files/run/t3112.scala
@@ -0,0 +1,11 @@
+// #3112
+object Test {
+
+ def main(args: Array[String]): Unit = {
+ println((Vector() ++ (0 until 32)) take 0) // works
+ println((Vector() ++ (0 until 33)) take 0) // error
+ println((Vector() ++ (0 until 32)) takeRight 0) // works
+ println((Vector() ++ (0 until 33)) takeRight 0) // error
+ }
+
+} \ No newline at end of file
diff --git a/test/files/run/t3158.check b/test/files/run/t3158.check
new file mode 100644
index 0000000000..ab1cb284d5
--- /dev/null
+++ b/test/files/run/t3158.check
@@ -0,0 +1 @@
+Array(<function1>)
diff --git a/test/files/run/t3158.scala b/test/files/run/t3158.scala
new file mode 100644
index 0000000000..c824b62e96
--- /dev/null
+++ b/test/files/run/t3158.scala
@@ -0,0 +1,9 @@
+object Test {
+ def main(args: Array[String]) {
+ println(args.map(_ => foo _).deep)
+ }
+
+ def foo(xs: String*) {
+ println(xs)
+ }
+}
diff --git a/test/pending/jvm/t1464.check b/test/files/run/t3186.check
index c508d5366f..c508d5366f 100644
--- a/test/pending/jvm/t1464.check
+++ b/test/files/run/t3186.check
diff --git a/test/files/run/t3186.scala b/test/files/run/t3186.scala
new file mode 100644
index 0000000000..2534d4a164
--- /dev/null
+++ b/test/files/run/t3186.scala
@@ -0,0 +1,7 @@
+object Dist1 extends Enumeration { val Mile, Foot, Inch = Value }
+
+object Dist2 extends Enumeration { val Kilometer, Millimeter, Parsec = Value }
+
+object Test extends Application {
+ println(Dist1.Mile == Dist2.Kilometer)
+}
diff --git a/test/files/run/t3242.check b/test/files/run/t3242.check
new file mode 100644
index 0000000000..a145f6df8f
--- /dev/null
+++ b/test/files/run/t3242.check
@@ -0,0 +1,18 @@
+ append [num: 200] vec
+ remove [num: 200] vec
+ append [num: 400] vec
+ remove [num: 400] vec
+ append [num: 600] vec
+ remove [num: 600] vec
+ append [num: 800] vec
+ remove [num: 800] vec
+>> comparison done, num: 200
+ append [num: 2000] vec
+ remove [num: 2000] vec
+ append [num: 4000] vec
+ remove [num: 4000] vec
+ append [num: 6000] vec
+ remove [num: 6000] vec
+ append [num: 8000] vec
+ remove [num: 8000] vec
+>> comparison done, num: 2000
diff --git a/test/files/run/t3242.scala b/test/files/run/t3242.scala
new file mode 100644
index 0000000000..f8defaa5cd
--- /dev/null
+++ b/test/files/run/t3242.scala
@@ -0,0 +1,49 @@
+object Test {
+
+ def benchmarkA(num: Int) {
+
+ type A = Int
+
+ def updateM[M[_]](ms: M[A], update: (M[A], A)=>M[A]): M[A] = {
+ var is = ms
+ for (i <- 0 until num) is = update(is, i)
+ is
+ }
+
+ //
+ def vectorAppend: Vector[A] = updateM[Vector](Vector(), (as, a)=>{
+ val v = (as :+ a)
+ //println("==>append: i: "+i1+", v: "+v)
+ v
+ })
+ // this will crash, Vector bug!
+ def vectorRemove(vec: Vector[A]): Vector[A] = updateM[Vector](vec, (as, a)=>{
+ val v = (as filterNot{ _ == a})
+ //val v = (is filter{ _ != i})
+ //println("==>remove: i: "+a)
+ v
+ })
+
+ val ct = vectorAppend
+ println(" append [num: "+num+"] vec")
+ vectorRemove(ct)
+ println(" remove [num: "+num+"] vec")
+ } // BenchmarkA
+
+ def comparison(num: Int): Unit = {
+ for (i <- 1 until 5) benchmarkA(num*i)
+ println(">> comparison done, num: "+num);
+ }
+
+ def main(args: Array[String]): Unit = {
+ try {
+ //createBenchmarkA(23).testRun
+
+ comparison(200) // OK
+ comparison(2000) // this will crash
+ comparison(2000) // this will crash
+
+ } catch {
+ case e: Exception => e.printStackTrace()
+ }
+ }
+}
diff --git a/test/files/run/t3242b.scala b/test/files/run/t3242b.scala
new file mode 100644
index 0000000000..7a296aac15
--- /dev/null
+++ b/test/files/run/t3242b.scala
@@ -0,0 +1,17 @@
+import scala.collection.immutable._
+
+object Test {
+
+ def test(n: Int) = {
+ var vb = new VectorBuilder[Int]
+ for (i <- 0 until n)
+ vb += i
+ val v = vb.result
+ assert(v == (0 until n), "not same as (0 until " + n + "): " + v)
+ }
+
+ def main(args: Array[String]): Unit = {
+ for (i <- 0 until 2000)
+ test(i)
+ }
+}
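t3242b drives VectorBuilder across the sizes where Vector changes its internal depth. A minimal usage sketch of the builder:

import scala.collection.immutable.VectorBuilder

object VectorBuilderSketch {
  def main(args: Array[String]): Unit = {
    val vb = new VectorBuilder[Int]
    (0 until 5).foreach(vb += _)
    assert(vb.result() == Vector(0, 1, 2, 3, 4))
  }
}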
diff --git a/test/files/run/treePrint.check b/test/files/run/treePrint.check
new file mode 100644
index 0000000000..3360815ac1
--- /dev/null
+++ b/test/files/run/treePrint.check
@@ -0,0 +1,5 @@
+def foo = {
+ var q: Boolean = false;
+ val x = 5;
+ ((x == 5) || (!q)) || (true)
+}
diff --git a/test/files/run/treePrint.scala b/test/files/run/treePrint.scala
new file mode 100644
index 0000000000..ffe9a392d4
--- /dev/null
+++ b/test/files/run/treePrint.scala
@@ -0,0 +1,40 @@
+/** Testing compact tree printers.
+ */
+object Test {
+ import scala.tools.nsc._
+ import java.io.{ OutputStream, BufferedReader, StringReader, PrintWriter, Writer, OutputStreamWriter}
+
+ val code = """
+ def foo = {
+ var q: Boolean = false
+ val x = if (true) {
+ if (true) {
+ if (true) {
+ 5
+ }
+ else if (true) {
+ 5
+ } else {
+ 10
+ }
+ }
+ else 20
+ }
+ else 30
+
+ (x == 5) || !q || true
+ }
+ """
+
+ class NullOutputStream extends OutputStream { def write(b: Int) { } }
+
+ def main(args: Array[String]) {
+ val settings = new Settings
+ settings.classpath.value = System.getProperty("java.class.path")
+ settings.Ycompacttrees.value = true
+
+ val repl = new Interpreter(settings, new PrintWriter(new NullOutputStream))
+ repl.interpret("""def initialize = "Have to interpret something or we get errors." """)
+ println(repl mkTree code)
+ }
+}
diff --git a/test/files/run/unittest_collection.scala b/test/files/run/unittest_collection.scala
index 5d7ab97425..d45c23d4b5 100644
--- a/test/files/run/unittest_collection.scala
+++ b/test/files/run/unittest_collection.scala
@@ -96,7 +96,7 @@ object Test {
)
val tr = new TestResult()
ts.run(tr)
- for(val failure <- tr.failures) {
+ for (failure <- tr.failures) {
Console.println(failure)
}
}
diff --git a/test/files/run/unittest_iterator.scala b/test/files/run/unittest_iterator.scala
index 93aaa4a834..28a548160f 100644
--- a/test/files/run/unittest_iterator.scala
+++ b/test/files/run/unittest_iterator.scala
@@ -33,5 +33,18 @@ object Test
assertThat(1, (1 to 8).toList) { it.sliding(8, 8) withPartial false }
assertThat(2, List(9, 10, -1, -1, -1)) { it.sliding(5, 8) withPadding -1 }
assertThat(1, (1 to 5).toList) { it.sliding(5, 8) withPartial false }
+
+ // make sure it throws past the end
+ val thrown = try {
+ val it = List(1,2,3).sliding(2)
+ it.next
+ it.next
+ it.next
+ false
+ }
+ catch {
+ case _: NoSuchElementException => true
+ }
+ assert(thrown)
}
}
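The added lines above verify that a GroupedIterator throws NoSuchElementException once exhausted. A small sketch of the related withPadding / withPartial knobs already exercised earlier in the file:

object SlidingSketch {
  def main(args: Array[String]): Unit = {
    val xs = (1 to 7).toList
    val padded = xs.iterator.sliding(3, 3).withPadding(-1).toList
    assert(padded == List(List(1, 2, 3), List(4, 5, 6), List(7, -1, -1)))
    val whole = xs.iterator.sliding(3, 3).withPartial(false).toList
    assert(whole == List(List(1, 2, 3), List(4, 5, 6)))
  }
}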
diff --git a/test/files/run/vector1.scala b/test/files/run/vector1.scala
index 320bef220c..e8785e2b16 100644
--- a/test/files/run/vector1.scala
+++ b/test/files/run/vector1.scala
@@ -22,7 +22,7 @@ object Test {
def vectorForward(label: String, n: Int): Vector[String] = {
var a: Vector[String] = Vector.empty
for (i <- 0 until n)
- a = a.appendBack(label + i)
+ a = a :+ (label + i)
assertVector(a, label, 0, n)
}
@@ -30,7 +30,7 @@ object Test {
def vectorBackward(label: String, n: Int): Vector[String] = {
var a: Vector[String] = Vector.empty
for (i <- 0 until n)
- a = a.appendFront(label + (n-1-i))
+ a = (label + (n-1-i)) +: a
assertVector(a, label, 0, n)
}
@@ -92,8 +92,8 @@ object Test {
def nextChunkSize = 3 //rand.nextInt(chunkLimit)
- def seqBack() = for (i <- 0 until Math.min(nextChunkSize, N-max)) { a = a.appendBack("a"+max); max += 1 }
- def seqFront() = for (i <- 0 until Math.min(nextChunkSize, min)) { min -= 1; a = a.appendFront("a"+min) }
+ def seqBack() = for (i <- 0 until Math.min(nextChunkSize, N-max)) { a = a :+ ("a"+max); max += 1 }
+ def seqFront() = for (i <- 0 until Math.min(nextChunkSize, min)) { min -= 1; a = ("a"+min) +: a }
try {
@@ -104,7 +104,7 @@ object Test {
} catch {
case ex =>
//println("----------------")
- a.debug
+ //a.debug
throw ex
}
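The vector1 updates replace the old appendBack/appendFront methods with the standard :+ (append) and +: (prepend) operators. A one-liner sketch:

object VectorOpsSketch {
  def main(args: Array[String]): Unit = {
    var v = Vector.empty[String]
    v = v :+ "a0"          // formerly v.appendBack("a0")
    v = "a-1" +: v         // formerly v.appendFront("a-1")
    assert(v == Vector("a-1", "a0"))
  }
}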
diff --git a/test/files/run/viewtest.check b/test/files/run/viewtest.check
index ded3ac0e92..6e0fe81a67 100644
--- a/test/files/run/viewtest.check
+++ b/test/files/run/viewtest.check
@@ -1,17 +1,11 @@
-SeqViewZ((x,0))
+SeqViewZ(...)
ys defined
mapping 1
2
-mapping 1
-mapping 2
-mapping 3
-SeqViewMS(3, 4)
+SeqViewMS(...)
mapping 3
4
-mapping 1
-mapping 2
-mapping 3
-SeqViewM(2, 3, 4)
+SeqViewM(...)
mapping 1
mapping 2
mapping 3
diff --git a/test/files/run/viewtest.scala b/test/files/run/viewtest.scala
index 280ded57cf..833c0101e3 100755
--- a/test/files/run/viewtest.scala
+++ b/test/files/run/viewtest.scala
@@ -13,7 +13,7 @@ object Test extends Application {
println(ys.force)
val zs = Array(1, 2, 3).view
- val as: IndexedSeqView[Int, Array[Int]] = zs map (_ + 1)
+ val as: SeqView[Int, Array[Int]] = zs map (_ + 1)
val bs: Array[Int] = as.force
val cs = zs.reverse
cs(0) += 1
diff --git a/test/files/run/withIndex.scala b/test/files/run/withIndex.scala
index 3b9c9e84e5..910b1f1f9e 100644
--- a/test/files/run/withIndex.scala
+++ b/test/files/run/withIndex.scala
@@ -3,7 +3,7 @@ object Test {
val ary: Array[String] = Array("a", "b", "c")
val lst: List[String] = List("a", "b", "c")
val itr: Iterator[String] = lst.iterator
- val str: Stream[String] = Stream.fromIterator(lst.iterator)
+ val str: Stream[String] = lst.iterator.toStream
Console.println(ary.zipWithIndex.toList)
Console.println(lst.zipWithIndex.toList)
diff --git a/test/files/scalacheck/array.scala b/test/files/scalacheck/array.scala
index 2febca4447..03c0217180 100644
--- a/test/files/scalacheck/array.scala
+++ b/test/files/scalacheck/array.scala
@@ -4,36 +4,34 @@ import Gen._
import Arbitrary._
import util._
import Buildable._
+import scala.collection.mutable.ArraySeq
object Test extends Properties("Array") {
- val myGens: Seq[Gen[Array[_]]] = List(
- arbArray[Int],
- arbArray[Array[Int]],
- arbArray[List[String]],
- arbArray[String],
- arbArray[Boolean],
- arbArray[AnyVal](arbAnyVal)
- ) map (_.arbitrary)
+ /** At this moment the authentic scalacheck Array Builder/Arb bits are commented out.
+ */
+ implicit def arbArray[T](implicit a: Arbitrary[T], m: Manifest[T]): Arbitrary[Array[T]] =
+ Arbitrary(containerOf[List,T](arbitrary[T]) map (_.toArray))
+
+ val arrGen: Gen[Array[_]] = oneOf(
+ arbitrary[Array[Int]],
+ arbitrary[Array[Array[Int]]],
+ arbitrary[Array[List[String]]],
+ arbitrary[Array[String]],
+ arbitrary[Array[Boolean]],
+ arbitrary[Array[AnyVal]]
+ )
// inspired by #1857 and #2352
- property("eq/ne") =
- forAll(oneOf(myGens: _*)) { c1 =>
- forAll(oneOf(myGens: _*)) { c2 =>
- (c1 eq c2) || (c1 ne c2)
- }
- }
+ property("eq/ne") = forAll(arrGen, arrGen) { (c1, c2) =>
+ (c1 eq c2) || (c1 ne c2)
+ }
- def smallInt = choose(1, 10)
// inspired by #2299
- property("ofDim") = forAll(smallInt) { i1 =>
- forAll(smallInt) { i2 =>
- forAll(smallInt) { i3 =>
- val arr = Array.ofDim[String](i1, i2, i3)
- val flattened = arr flatMap (x => x) flatMap (x => x)
-
- flattened.length == i1 * i2 * i3
- }
- }
+ def smallInt = choose(1, 10)
+ property("ofDim") = forAll(smallInt, smallInt, smallInt) { (i1, i2, i3) =>
+ val arr = Array.ofDim[String](i1, i2, i3)
+ val flattened = arr flatMap (x => x) flatMap (x => x)
+ flattened.length == i1 * i2 * i3
}
}
diff --git a/test/files/scalacheck/eqeq.scala b/test/files/scalacheck/eqeq.scala
new file mode 100644
index 0000000000..60fe63c207
--- /dev/null
+++ b/test/files/scalacheck/eqeq.scala
@@ -0,0 +1,37 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+
+object Test extends Properties("==") {
+ def equalObjectsEqualHashcodes(x: Any, y: Any) = (x != y) || (x == y && x.## == y.##)
+
+ // ticket #2087
+ property("short/char") = forAll { (x: Short) => {
+ val ch: Char = x.toChar
+ (x == ch) == (ch == x)
+ }
+ }
+
+ property("symmetry") = forAll { (x: AnyVal, y: AnyVal) => (x == y) == (y == x) }
+ property("transitivity") = forAll { (x: AnyVal, y: AnyVal, z: AnyVal) => x != y || y != z || x == z }
+
+ property("##") = forAll {
+ (x: Short) => {
+ val anyvals = List(x.toByte, x.toChar, x, x.toInt, x.toLong, x.toFloat, x.toDouble, BigInt(x), BigDecimal(x))
+ val shortAndLarger = anyvals drop 2
+
+ val result = (
+ ((anyvals, anyvals).zipped forall equalObjectsEqualHashcodes) &&
+ ((shortAndLarger, shortAndLarger).zipped forall (_ == _)) &&
+ ((shortAndLarger, shortAndLarger).zipped forall ((x, y) => (x: Any) == (y: Any)))
+ )
+ result
+ }
+ }
+ property("## 2") = forAll {
+ (dv: Double) => {
+ val fv = dv.toFloat
+ (fv != dv) || (fv.## == dv.##)
+ }
+ }
+}
diff --git a/test/files/scalacheck/list.scala b/test/files/scalacheck/list.scala
index 87ecd70a48..1caf35e872 100644
--- a/test/files/scalacheck/list.scala
+++ b/test/files/scalacheck/list.scala
@@ -7,14 +7,14 @@ object Test extends Properties("List") {
property("concat size") = forAll { (l1: List[Int], l2: List[Int]) => (l1.size + l2.size) == (l1 ::: l2).size }
property("reverse") = forAll { (l1: List[Int]) => l1.reverse.reverse == l1 }
- property("toSet") = forAll { (l1: List[Int]) => sorted(l1.toSet.toList) sameElements sorted(l1).removeDuplicates }
+ property("toSet") = forAll { (l1: List[Int]) => sorted(l1.toSet.toList) sameElements sorted(l1).distinct }
property("flatten") = forAll { (xxs: List[List[Int]]) => xxs.flatten.length == (xxs map (_.length) sum) }
property("startsWith/take") = forAll { (xs: List[Int], count: Int) => xs startsWith (xs take count) }
property("endsWith/takeRight") = forAll { (xs: List[Int], count: Int) => xs endsWith (xs takeRight count) }
property("fill") = forAll(choose(1, 100)) { count =>
forAll { (x: Int) =>
val xs = List.fill(count)(x)
- (xs.length == count) && (xs.removeDuplicates == List(x))
+ (xs.length == count) && (xs.distinct == List(x))
}
}
}
diff --git a/test/files/scalacheck/range.scala b/test/files/scalacheck/range.scala
new file mode 100644
index 0000000000..faa1f5d479
--- /dev/null
+++ b/test/files/scalacheck/range.scala
@@ -0,0 +1,205 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+class Counter(r: Range) {
+ var cnt = 0L
+ var last: Option[Int] = None
+ val str = "Range["+r.start+", "+r.end+", "+r.step+(if (r.isInclusive) "]" else ")")
+ def apply(x: Int) = {
+ cnt += 1L
+ if (cnt % 500000000L == 0L) {
+ println("Working: %s %d %d" format (str, cnt, x))
+ }
+ if (cnt > (Int.MaxValue.toLong + 1) * 2)
+ error("Count exceeds maximum possible for an Int Range")
+ if ((r.step > 0 && last.exists(_ > x)) || (r.step < 0 && last.exists(_ < x)))
+ error("Range wrapped: %d %s" format (x, last.toString))
+ last = Some(x)
+ }
+}
+
+abstract class RangeTest(kind: String) extends Properties("Range "+kind) {
+ def myGen: Gen[Range]
+
+ val genRange = for {
+ start <- arbitrary[Int]
+ end <- arbitrary[Int]
+ step <- Gen.choose(1, (start - end).abs + 1)
+ } yield if (start < end) Range(start, end, step) else Range(start, end, -step)
+
+ val genReasonableSizeRange = for {
+ start <- choose(-Int.MinValue, Int.MaxValue)
+ end <- choose(-Int.MinValue, Int.MaxValue)
+ step <- choose(-Int.MaxValue, Int.MaxValue)
+ } yield Range(start, end, if (step == 0) 100 else step)
+
+ val genSmallRange = for {
+ start <- choose(-100, 100)
+ end <- choose(-100, 100)
+ step <- choose(1, 1)
+ } yield if (start < end) Range(start, end, step) else Range(start, end, -step)
+
+ val genRangeByOne = for {
+ start <- arbitrary[Int]
+ end <- arbitrary[Int]
+ if (end.toLong - start.toLong).abs <= 10000000L
+ } yield if (start < end) Range(start, end) else Range(end, start)
+
+ def str(r: Range) = "Range["+r.start+", "+r.end+", "+r.step+(if (r.isInclusive) "]" else ")")
+
+ def expectedSize(r: Range): Long = if (r.isInclusive) {
+ (r.end.toLong - r.start.toLong < 0, r.step < 0) match {
+ case (true, true) | (false, false) => (r.end.toLong - r.start.toLong).abs / r.step.abs.toLong + 1L
+ case _ => if (r.start == r.end) 1L else 0L
+ }
+ } else {
+ (r.end.toLong - r.start.toLong < 0, r.step < 0) match {
+ case (true, true) | (false, false) => (
+ (r.end.toLong - r.start.toLong).abs / r.step.abs.toLong
+ + (if ((r.end.toLong - r.start.toLong).abs % r.step.abs.toLong > 0L) 1L else 0L)
+ )
+ case _ => 0L
+ }
+ }
+
+ def within(r: Range, x: Int) = if (r.step > 0)
+ r.start <= x && (if (r.isInclusive) x <= r.end else x < r.end)
+ else
+ r.start >= x && (if (r.isInclusive) x >= r.end else x > r.end)
+
+ def multiple(r: Range, x: Int) = (x.toLong - r.start) % r.step == 0
+
+ property("foreach.step") = forAll(myGen) { r =>
+ var allValid = true
+ val cnt = new Counter(r)
+// println("--------------------")
+// println(r)
+ r foreach { x => cnt(x)
+// println(x + ", " + (x - r.start) + ", " + (x.toLong - r.start) + ", " + ((x.toLong - r.start) % r.step))
+ allValid &&= multiple(r, x)
+ }
+ allValid :| str(r)
+ }
+
+ property("foreach.inside.range") = forAll(myGen) { r =>
+ var allValid = true
+ var last: Option[Int] = None
+ val cnt = new Counter(r)
+ r foreach { x => cnt(x)
+ allValid &&= within(r, x)
+ }
+ allValid :| str(r)
+ }
+
+ property("foreach.visited.size") = forAll(myGen) { r =>
+ var visited = 0L
+ val cnt = new Counter(r)
+ r foreach { x => cnt(x)
+ visited += 1L
+ }
+// println("----------")
+// println(str(r))
+// println("size: " + r.size)
+// println("expected: " + expectedSize(r))
+// println("visited: " + visited)
+ (visited == expectedSize(r)) :| str(r)
+ }
+
+ property("length") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
+ (r.length == expectedSize(r)) :| str(r)
+ }
+
+ property("isEmpty") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r =>
+ (r.isEmpty == (expectedSize(r) == 0L)) :| str(r)
+ }
+
+ property("contains") = forAll(myGen, arbInt.arbitrary) { (r, x) =>
+// println("----------------")
+// println(str(r))
+// println(x)
+// println("within: " + within(r, x))
+// println("multiple: " + multiple(r, x))
+// println("contains: " + r.contains(x))
+ ((within(r, x) && multiple(r, x)) == r.contains(x)) :| str(r)+": "+x
+ }
+
+ property("take") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r)), arbInt.arbitrary) { (r, x) =>
+ val t = r take x
+ (t.size == (0 max x min r.size) && t.start == r.start && t.step == r.step) :| str(r)+" / "+str(t)+": "+x
+ }
+
+ property("takeWhile") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r)), arbInt.arbitrary) { (r, x) =>
+ val t = (if (r.step > 0) r takeWhile (_ <= x) else r takeWhile(_ >= x))
+ if (r.size == 0) {
+ (t.size == 0) :| str(r)+" / "+str(t)+": "+x
+ } else {
+ val t2 = (if (r.step > 0) Range(r.start, x min r.last, r.step).inclusive else Range(r.start, x max r.last, r.step).inclusive)
+ (t.start == r.start && t.size == t2.size && t.step == r.step) :| str(r)+" / "+str(t)+" / "+str(t2)+": "+x
+ }
+ }
+
+ property("reverse.toSet.equal") = forAll(myGen) { r =>
+ val reversed = r.reverse
+ val aresame = r.toSet == reversed.toSet
+ if (!aresame) {
+ println(str(r))
+ println(r)
+ println(reversed)
+ println(r.toSet)
+ println(reversed.toSet)
+ }
+ aresame
+ }
+}
+
+object NormalRangeTest extends RangeTest("normal") {
+ override def myGen = genReasonableSizeRange
+ def genOne = for {
+ start <- arbitrary[Int]
+ end <- arbitrary[Int]
+ if (start.toLong - end.toLong).abs < Int.MaxValue.toLong
+ } yield Range(start, end, if (start < end) 1 else - 1)
+ property("by 1.size + 1 == inclusive.size") = forAll(genOne) { r =>
+ (r.size + 1 == r.inclusive.size) :| str(r)
+ }
+}
+
+object InclusiveRangeTest extends RangeTest("inclusive") {
+ override def myGen = for (r <- genReasonableSizeRange) yield r.inclusive
+}
+
+object ByOneRangeTest extends RangeTest("byOne") {
+ override def myGen = genSmallRange
+}
+
+object InclusiveByOneRangeTest extends RangeTest("inclusiveByOne") {
+ override def myGen = for (r <- genSmallRange) yield r.inclusive
+}
+
+object SmallValuesRange extends RangeTest("smallValues") {
+ override def myGen = genSmallRange
+}
+
+object Test extends Properties("Range") {
+ include(NormalRangeTest)
+ include(InclusiveRangeTest)
+ include(ByOneRangeTest)
+ include(InclusiveByOneRangeTest)
+}
+
+/* Mini-benchmark
+def testRange(i: Int, j: Int, k: Int) = {
+ var count = 0
+ for {
+ vi <- 0 to i
+ vj <- 0 to j
+ vk <- 0 to k
+ } { count += 1 }
+}
+
+testRange(10, 1000, 10000)
+testRange(10000, 1000, 10)
+*/
+
diff --git a/test/files/scalacheck/scan.scala b/test/files/scalacheck/scan.scala
new file mode 100644
index 0000000000..e9b25ce3df
--- /dev/null
+++ b/test/files/scalacheck/scan.scala
@@ -0,0 +1,17 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+
+
+object Test extends Properties("TraversableLike.scanLeft") {
+ property("scanLeft") = forAll { (xs: List[Int], z: Int) => {
+ val sums = xs.scanLeft(z)(_ + _)
+ (xs.size == 0) || sums.zip(sums.tail).map(x => x._2 - x._1) == xs
+ }}
+}
+
+
+
+
+
+
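The scanLeft property above says the partial sums determine the input: consecutive differences recover the original list. A concrete instance:

object ScanLeftSketch {
  def main(args: Array[String]): Unit = {
    val xs = List(3, 1, 4, 1, 5)
    val sums = xs.scanLeft(0)(_ + _)
    assert(sums == List(0, 3, 4, 8, 9, 14))
    assert(sums.zip(sums.tail).map { case (a, b) => b - a } == xs)
  }
}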
diff --git a/test/files/scalap/caseClass/result.test b/test/files/scalap/caseClass/result.test
index cabf321a07..eb1ad74295 100644
--- a/test/files/scalap/caseClass/result.test
+++ b/test/files/scalap/caseClass/result.test
@@ -1,10 +1,9 @@
-case class CaseClass[A >: scala.Nothing <: scala.Seq[scala.Int]](i : A, s : scala.Predef.String) extends java.lang.Object with scala.ScalaObject with scala.Product {
+@scala.serializable
+case class CaseClass[A <: scala.Seq[scala.Int]](i : A, s : scala.Predef.String) extends java.lang.Object with scala.ScalaObject with scala.Product {
val i : A = { /* compiled code */ }
val s : scala.Predef.String = { /* compiled code */ }
def foo : scala.Int = { /* compiled code */ }
- def copy[A >: scala.Nothing <: scala.Seq[scala.Int]]() : CaseClass[A] = { /* compiled code */ }
- def copy$default$1[A >: scala.Nothing <: scala.Seq[scala.Int]] : A = { /* compiled code */ }
- def copy$default$2[A >: scala.Nothing <: scala.Seq[scala.Int]] : scala.Predef.String = { /* compiled code */ }
+ def copy[A <: scala.Seq[scala.Int]](i : A, s : scala.Predef.String) : CaseClass[A] = { /* compiled code */ }
override def hashCode() : scala.Int = { /* compiled code */ }
override def toString() : scala.Predef.String = { /* compiled code */ }
override def equals(x$1 : scala.Any) : scala.Boolean = { /* compiled code */ }
diff --git a/test/files/scalap/classPrivate/A.scala b/test/files/scalap/classPrivate/A.scala
new file mode 100644
index 0000000000..9f1bd34a6a
--- /dev/null
+++ b/test/files/scalap/classPrivate/A.scala
@@ -0,0 +1,9 @@
+class ClassPrivate {
+ private def foo = 1
+ private[ClassPrivate] def bar = 2
+ def baz = 3
+ class Outer {
+ private[ClassPrivate] def qux = 4
+ }
+ protected def quux = 5
+} \ No newline at end of file
diff --git a/test/files/scalap/classPrivate/result.test b/test/files/scalap/classPrivate/result.test
new file mode 100644
index 0000000000..0d12b779c3
--- /dev/null
+++ b/test/files/scalap/classPrivate/result.test
@@ -0,0 +1,10 @@
+class ClassPrivate extends java.lang.Object with scala.ScalaObject {
+ def this() = { /* compiled code */ }
+ def baz : scala.Int = { /* compiled code */ }
+ class Outer extends java.lang.Object with scala.ScalaObject {
+ def this() = { /* compiled code */ }
+ private[ClassPrivate] def qux : scala.Int = { /* compiled code */ }
+ }
+ protected def quux : scala.Int = { /* compiled code */ }
+ private[ClassPrivate] def bar : scala.Int = { /* compiled code */ }
+} \ No newline at end of file
diff --git a/test/files/scalap/classWithExistential/result.test b/test/files/scalap/classWithExistential/result.test
index 243f51e2ad..91afddaf0e 100644
--- a/test/files/scalap/classWithExistential/result.test
+++ b/test/files/scalap/classWithExistential/result.test
@@ -1,4 +1,4 @@
class ClassWithExistential extends java.lang.Object with scala.ScalaObject {
def this() = { /* compiled code */ }
- def foo[A >: scala.Nothing <: scala.Any, B >: scala.Nothing <: scala.Any] : scala.Function1[A, B forSome {type A >: scala.Nothing <: scala.Seq[scala.Int]; type B >: scala.Predef.String <: scala.Any}] = { /* compiled code */ }
-}
+ def foo[A, B] : scala.Function1[A, B forSome {type A <: scala.Seq[scala.Int]; type B >: scala.Predef.String}] = { /* compiled code */ }
+}
\ No newline at end of file
diff --git a/test/files/scalap/covariantParam/result.test b/test/files/scalap/covariantParam/result.test
index ce480ee0cd..8acd9b497a 100644
--- a/test/files/scalap/covariantParam/result.test
+++ b/test/files/scalap/covariantParam/result.test
@@ -1,4 +1,4 @@
-class CovariantParam[+A >: scala.Nothing <: scala.Any] extends java.lang.Object with scala.ScalaObject {
+class CovariantParam[+A] extends java.lang.Object with scala.ScalaObject {
def this() = { /* compiled code */ }
- def foo[A >: scala.Nothing <: scala.Any](a : A) : scala.Int = { /* compiled code */ }
+ def foo[A](a : A) : scala.Int = { /* compiled code */ }
}
diff --git a/test/files/scalap/defaultParameter/A.scala b/test/files/scalap/defaultParameter/A.scala
new file mode 100644
index 0000000000..d3514952f4
--- /dev/null
+++ b/test/files/scalap/defaultParameter/A.scala
@@ -0,0 +1,3 @@
+trait DefaultParameter {
+ def foo(s: String = "hello"): Unit
+}
\ No newline at end of file
diff --git a/test/files/scalap/defaultParameter/result.test b/test/files/scalap/defaultParameter/result.test
new file mode 100644
index 0000000000..38bf6ac4e3
--- /dev/null
+++ b/test/files/scalap/defaultParameter/result.test
@@ -0,0 +1,3 @@
+trait DefaultParameter extends java.lang.Object {
+ def foo(s : scala.Predef.String) : scala.Unit
+}
\ No newline at end of file
diff --git a/test/files/scalap/typeAnnotations/A.scala b/test/files/scalap/typeAnnotations/A.scala
new file mode 100644
index 0000000000..ff2445edc9
--- /dev/null
+++ b/test/files/scalap/typeAnnotations/A.scala
@@ -0,0 +1,9 @@
+abstract class TypeAnnotations[@specialized R] {
+ @specialized val x = 10
+ @specialized type T
+
+ def compose[@specialized A](x: A, y: R): A = {
+ val y: A = x
+ x
+ }
+}
\ No newline at end of file
diff --git a/test/files/scalap/typeAnnotations/result.test b/test/files/scalap/typeAnnotations/result.test
new file mode 100644
index 0000000000..b565d6185b
--- /dev/null
+++ b/test/files/scalap/typeAnnotations/result.test
@@ -0,0 +1,8 @@
+abstract class TypeAnnotations[@scala.specialized R] extends java.lang.Object with scala.ScalaObject {
+ def this() = { /* compiled code */ }
+ @scala.specialized
+ val x : scala.Int = { /* compiled code */ }
+ @scala.specialized
+ type T
+ def compose[@scala.specialized A](x : A, y : R) : A = { /* compiled code */ }
+}
\ No newline at end of file
diff --git a/test/files/script/fact.scala b/test/files/script/fact.scala
index d48dac6f0f..d48dac6f0f 100644..100755
--- a/test/files/script/fact.scala
+++ b/test/files/script/fact.scala
diff --git a/test/partest b/test/partest
index 1405efa0c0..124c4d605e 100755
--- a/test/partest
+++ b/test/partest
@@ -75,7 +75,23 @@ if $cygwin; then
fi
# Reminder: substitution ${JAVA_OPTS:=-Xmx256M -Xms16M} DO NOT work on Solaris
-[ -n "$JAVA_OPTS" ] || JAVA_OPTS="-Xmx512M -Xms16M"
-[ -n "$SCALAC_OPTS" ] || SCALAC_OPTS="-deprecation"
+[ -n "$JAVA_OPTS" ] || JAVA_OPTS="-Xms512M -Xmx1536M -Xss1M -XX:MaxPermSize=128M"
+[ -n "$SCALAC_OPTS" ] || SCALAC_OPTS=""
-${JAVACMD:=java} $JAVA_OPTS -cp "$EXT_CLASSPATH" -Dpartest.debug="${PARTEST_DEBUG}" -Dscala.home="${SCALA_HOME}" -Dscalatest.javacmd="${JAVACMD}" -Dscalatest.java_opts="${JAVA_OPTS}" -Dscalatest.scalac_opts="${SCALAC_OPTS}" -Dscalatest.javac_cmd="${JAVA_HOME}/bin/javac" scala.tools.partest.nest.NestRunner "$@"
+export SCALAC_OPTS
+export JAVA_OPTS
+export JAVACMD
+
+${JAVACMD:=java} $JAVA_OPTS \
+ -cp "$EXT_CLASSPATH" \
+ -Dscala.home="${SCALA_HOME}" \
+ scala.tools.partest.Runner "$@"
+
+# ${JAVACMD:=java} $JAVA_OPTS \
+# -cp "$EXT_CLASSPATH" \
+# -Dscala.home="${SCALA_HOME}" \
+# -Dpartest.scalac_opts="${SCALAC_OPTS}" \
+# -Dpartest.javacmd="${JAVACMD}" \
+# -Dpartest.java_opts="${JAVA_OPTS}" \
+# -Dpartest.javac_cmd="${JAVA_HOME}/bin/javac" \
+# scala.tools.partest.Runner "$@"
diff --git a/test/partest.bat b/test/partest.bat
index 861c0a2465..0b5618a164 100755
--- a/test/partest.bat
+++ b/test/partest.bat
@@ -53,10 +53,10 @@ if "%_EXTENSION_CLASSPATH%"=="" (
)
)
-set _PROPS=-Dscala.home="%_SCALA_HOME%" -Dscalatest.javacmd="%_JAVACMD%" -Dscalatest.java_options="%_JAVA_OPTS%" -Dscalatest.scalac_options="%_SCALAC_OPTS%" -Dscalatest.javac_cmd="%JAVA_HOME%\bin\javac"
+set _PROPS=-Dscala.home="%_SCALA_HOME%" -Dpartest.javacmd="%_JAVACMD%" -Dpartest.java_options="%_JAVA_OPTS%" -Dpartest.scalac_options="%_SCALAC_OPTS%" -Dpartest.javac_cmd="%JAVA_HOME%\bin\javac"
-rem echo %_JAVACMD% %_JAVA_OPTS% %_PROPS% -cp "%_EXTENSION_CLASSPATH%" scala.tools.partest.nest.NestRunner %_ARGS%
-%_JAVACMD% %_JAVA_OPTS% %_PROPS% -cp "%_EXTENSION_CLASSPATH%" scala.tools.partest.nest.NestRunner %_ARGS%
+rem echo %_JAVACMD% %_JAVA_OPTS% %_PROPS% -cp "%_EXTENSION_CLASSPATH%" scala.tools.partest.Runner %_ARGS%
+%_JAVACMD% %_JAVA_OPTS% %_PROPS% -cp "%_EXTENSION_CLASSPATH%" scala.tools.partest.Runner %_ARGS%
goto end
rem ##########################################################################
diff --git a/test/pending/buildmanager/t2443/BitSet.scala b/test/pending/buildmanager/t2443/BitSet.scala
new file mode 100644
index 0000000000..8d7c8dcd23
--- /dev/null
+++ b/test/pending/buildmanager/t2443/BitSet.scala
@@ -0,0 +1,2 @@
+import scala.collection.BitSet
+//class BitSet
diff --git a/test/pending/buildmanager/t2443/t2443.changes/BitSet2.scala b/test/pending/buildmanager/t2443/t2443.changes/BitSet2.scala
new file mode 100644
index 0000000000..27a5d4de9f
--- /dev/null
+++ b/test/pending/buildmanager/t2443/t2443.changes/BitSet2.scala
@@ -0,0 +1 @@
+import scala.collection.BitSet
diff --git a/test/pending/buildmanager/t2443/t2443.check b/test/pending/buildmanager/t2443/t2443.check
new file mode 100644
index 0000000000..dd88e1ceb9
--- /dev/null
+++ b/test/pending/buildmanager/t2443/t2443.check
@@ -0,0 +1,6 @@
+builder > BitSet.scala
+compiling Set(BitSet.scala)
+builder > BitSet.scala
+Changes: Map(class BitSet -> List(Removed(Class(BitSet))))
+
+
diff --git a/test/pending/buildmanager/t2443/t2443.test b/test/pending/buildmanager/t2443/t2443.test
new file mode 100644
index 0000000000..a1d61ff5a3
--- /dev/null
+++ b/test/pending/buildmanager/t2443/t2443.test
@@ -0,0 +1,3 @@
+>>compile BitSet.scala
+>>update BitSet.scala=>BitSet2.scala
+>>compile BitSet.scala
diff --git a/test/pending/continuations-run/example0.scala b/test/pending/continuations-run/example0.scala
new file mode 100644
index 0000000000..44b1331339
--- /dev/null
+++ b/test/pending/continuations-run/example0.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test0.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example1.scala b/test/pending/continuations-run/example1.scala
new file mode 100644
index 0000000000..195a98e59f
--- /dev/null
+++ b/test/pending/continuations-run/example1.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test1.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example16.scala b/test/pending/continuations-run/example16.scala
new file mode 100644
index 0000000000..5eb64046ed
--- /dev/null
+++ b/test/pending/continuations-run/example16.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test16Printf.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example2.scala b/test/pending/continuations-run/example2.scala
new file mode 100644
index 0000000000..0d96257c40
--- /dev/null
+++ b/test/pending/continuations-run/example2.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test2.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example3.scala b/test/pending/continuations-run/example3.scala
new file mode 100644
index 0000000000..3f5052a4ad
--- /dev/null
+++ b/test/pending/continuations-run/example3.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test3.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example4.scala b/test/pending/continuations-run/example4.scala
new file mode 100644
index 0000000000..66c6774791
--- /dev/null
+++ b/test/pending/continuations-run/example4.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test4.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example5.scala b/test/pending/continuations-run/example5.scala
new file mode 100644
index 0000000000..0994bdee8a
--- /dev/null
+++ b/test/pending/continuations-run/example5.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test5.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example6.scala b/test/pending/continuations-run/example6.scala
new file mode 100644
index 0000000000..5207e3fc68
--- /dev/null
+++ b/test/pending/continuations-run/example6.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test6.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example7.scala b/test/pending/continuations-run/example7.scala
new file mode 100644
index 0000000000..fb22387dac
--- /dev/null
+++ b/test/pending/continuations-run/example7.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test7.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example8.scala b/test/pending/continuations-run/example8.scala
new file mode 100644
index 0000000000..8e21e6c674
--- /dev/null
+++ b/test/pending/continuations-run/example8.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test8.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/example9.scala b/test/pending/continuations-run/example9.scala
new file mode 100644
index 0000000000..0f27c686f7
--- /dev/null
+++ b/test/pending/continuations-run/example9.scala
@@ -0,0 +1,9 @@
+// $Id$
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+ examples.continuations.Test9Monads.main(args)
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/continuations-run/foreach.check b/test/pending/continuations-run/foreach.check
new file mode 100644
index 0000000000..9bab7a2eed
--- /dev/null
+++ b/test/pending/continuations-run/foreach.check
@@ -0,0 +1,4 @@
+1
+2
+3
+enough is enough
\ No newline at end of file
diff --git a/test/pending/continuations-run/foreach.scala b/test/pending/continuations-run/foreach.scala
new file mode 100644
index 0000000000..4daade452c
--- /dev/null
+++ b/test/pending/continuations-run/foreach.scala
@@ -0,0 +1,33 @@
+// $Id$
+
+import scala.util.continuations._
+
+import scala.util.continuations.Loops._
+
+object Test {
+
+ def main(args: Array[String]): Any = {
+
+
+ reset {
+
+ val list = List(1,2,3,4,5)
+
+ for (x <- list.suspendable) {
+
+ shift { k: (Unit => Unit) =>
+ println(x)
+ if (x < 3)
+ k()
+ else
+ println("enough is enough")
+ }
+
+ }
+
+ }
+
+
+ }
+
+}
\ No newline at end of file
diff --git a/test/pending/jvm/actor-executor4.check b/test/pending/jvm/actor-executor4.check
new file mode 100644
index 0000000000..da78f45836
--- /dev/null
+++ b/test/pending/jvm/actor-executor4.check
@@ -0,0 +1,21 @@
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+Two: OK
+One: OK
+One exited
diff --git a/test/pending/jvm/actor-executor4.scala b/test/pending/jvm/actor-executor4.scala
new file mode 100644
index 0000000000..a912d76094
--- /dev/null
+++ b/test/pending/jvm/actor-executor4.scala
@@ -0,0 +1,64 @@
+import scala.actors.{Actor, Exit}
+import scala.actors.scheduler.ExecutorScheduler
+import java.util.concurrent.Executors
+
+object One extends AdaptedActor {
+ def act() {
+ Two.start()
+ var i = 0
+ loopWhile (i < Test.NUM_MSG) {
+ i += 1
+ Two ! 'MsgForTwo
+ react {
+ case 'MsgForOne =>
+ if (i % (Test.NUM_MSG/10) == 0)
+ println("One: OK")
+ }
+ }
+ }
+}
+
+object Two extends AdaptedActor {
+ def act() {
+ var i = 0
+ loopWhile (i < Test.NUM_MSG) {
+ i += 1
+ react {
+ case 'MsgForTwo =>
+ if (i % (Test.NUM_MSG/10) == 0)
+ println("Two: OK")
+ One ! 'MsgForOne
+ }
+ }
+ }
+}
+
+trait AdaptedActor extends Actor {
+ override def scheduler =
+ Test.scheduler
+}
+
+object Test {
+ val NUM_MSG = 100000
+
+ val scheduler =
+ ExecutorScheduler(
+ Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()),
+ false)
+
+ def main(args: Array[String]) {
+ (new AdaptedActor {
+ def act() {
+ trapExit = true
+ link(One)
+ One.start()
+
+ receive {
+ case Exit(from, reason) =>
+ println("One exited")
+ Test.scheduler.shutdown()
+ }
+ }
+ }).start()
+ }
+}
diff --git a/test/files/neg/bug1210.check b/test/pending/neg/bug1210.check
index 4db920556f..4db920556f 100644
--- a/test/files/neg/bug1210.check
+++ b/test/pending/neg/bug1210.check
diff --git a/test/pending/pos/t0644.scala b/test/pending/pos/t0644.scala
deleted file mode 100644
index 5ad12c3632..0000000000
--- a/test/pending/pos/t0644.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-class A {
- def appply(): Int = 0
- def update(n: Int) {}
-}
-
-class B extends A {
- this()
- this()=1
- super()
- super()=1
-}
diff --git a/test/files/pos/t1380/gnujaxp.jar.desired.sha1 b/test/pending/pos/t1380/gnujaxp.jar.desired.sha1
index c155c2aaa2..c155c2aaa2 100644
--- a/test/files/pos/t1380/gnujaxp.jar.desired.sha1
+++ b/test/pending/pos/t1380/gnujaxp.jar.desired.sha1
diff --git a/test/files/pos/t1380/hallo.scala b/test/pending/pos/t1380/hallo.scala
index 27ecd9fb8b..27ecd9fb8b 100644
--- a/test/files/pos/t1380/hallo.scala
+++ b/test/pending/pos/t1380/hallo.scala
diff --git a/test/pending/pos/t1659.scala b/test/pending/pos/t1659.scala
deleted file mode 100644
index 10470d66f8..0000000000
--- a/test/pending/pos/t1659.scala
+++ /dev/null
@@ -1,4 +0,0 @@
-trait Y { type X }
-trait W { def u[A](v : Y { type X = A }) : Unit }
-class Z extends W { def u[A](v : Y { type X = A }) = null }
-
diff --git a/test/pending/pos/t2060.scala b/test/pending/pos/t2060.scala
deleted file mode 100644
index 3f47259849..0000000000
--- a/test/pending/pos/t2060.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-object Test {
- class Op[I];
- class IntOp extends Op[Int];
-
- class Rich(x : Double) {
- def + (op : IntOp) = op;
- def + [I](op : Op[I]) = op;
- def plus [I](op : Op[I]) = op;
- }
-
- implicit def iToRich(x : Double) =
- new Rich(x);
-
- // fails to compile
- val failure = 1.0 + new Op[Int];
-
- // works as expected --
- // problem isn't in adding new "+"
- val a = 1.0 + new IntOp;
-
- // works as expected --
- // problem isn't in binding type variable I
- val b = 1.0 plus new Op[Int];
-
- // works as expected --
- // problem isn't in using Rich.+[I](op : Op[I])
- val c = iToRich(1.0) + new Op[Int];
-}
diff --git a/test/files/run/bug1697.scala b/test/pending/run/bug1697.scala
index 01590dd405..01590dd405 100644
--- a/test/files/run/bug1697.scala
+++ b/test/pending/run/bug1697.scala
diff --git a/test/pending/run/bug2365/run b/test/pending/run/bug2365/run
deleted file mode 100755
index f3c44ad086..0000000000
--- a/test/pending/run/bug2365/run
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/sh
-#
-# This script should fail with any build of scala where #2365
-# is not fixed, and otherwise succeed. Failure means running out
-# of PermGen space.
-
-CP=.:/local/lib/java/ivy.jar
-# SCALAC=/scala/inst/28/bin/scalac
-SCALAC=scalac
-RUN_OPTS="-XX:MaxPermSize=25M -verbose:gc"
-
-$SCALAC -cp $CP *.scala
-JAVA_OPTS="${RUN_OPTS}" scala -cp $CP Test
diff --git a/test/pending/run/bugs425-and-816.scala b/test/pending/run/bugs425-and-816.scala
deleted file mode 100644
index d9267d06af..0000000000
--- a/test/pending/run/bugs425-and-816.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-object Test {
- object bug425 {
- case class A(x: Int)
- case class B(override val x: Int, y: Double) extends A(x)
-
- val b: A = B(5, 3.3)
- b match {
- case B(x, y) => Console.println(y)
- case A(x) => Console.println(x)
- }
- }
-
- object bug816 {
- abstract class Atest(val data: String)
-
- case class Btest(override val data: String, val b: boolean) extends Atest(data)
-
- case class Ctest(override val data: String) extends Btest(data, true)
-
- class testCaseClass {
- def test(x: Atest) = x match {
- case Ctest(data) => Console.println("C")
- case Btest(data, b) => Console.println("B")
- }
- }
- }
-}
diff --git a/test/pending/run/instanceOfAndTypeMatching.scala b/test/pending/run/instanceOfAndTypeMatching.scala
new file mode 100644
index 0000000000..9ab2d6c3c4
--- /dev/null
+++ b/test/pending/run/instanceOfAndTypeMatching.scala
@@ -0,0 +1,193 @@
+// Summary of incorrect or questionable behavior.
+// Full code and successful parts follow.
+
+object Summary {
+ class Outer {
+ class Inner { }
+ def f() = { class MethodInner ; new MethodInner }
+ }
+
+ // 1 static issue:
+ //
+ // Given method in MethodInner: def g(other: MethodInner) = ()
+ // method1.g(method1) fails to compile with type error.
+ //
+ // Note that this cannot be worked around by widening the return type
+ // of f() because MethodInner is declared inside of f. So there is no way
+ // I see for a class declared inside a method to receive members of its
+ // own declared type -- not only the narrow type of those from this
+ // instance, but ANY members, because there is no Foo#Bar syntax which will
+ // traverse a method.
+ //
+ // 4 runtime issues:
+ //
+ // From the outside: inner1.isInstanceOf[outer2.Inner] is true, should (maybe) be false
+ // From inside inner1: inner2.isInstanceOf[Outer.this.Inner] is true, should (maybe) be false
+ // From the outside: inner1 match { case _: outer2.Inner => true ... } is true, should definitely be false
+ // From inside method1: method2 match { case _: MethodInner => true ... } is true, should definitely be false
+ //
+ // Note that the fact that every test returns true on instances of MethodInner means
+ // that it is impossible to draw any type distinction between instances. As far as one
+ // can tell, they are all of the same type regardless not only of whether they were
+ // created on the same method invocation but whether they are contained in the same
+ // instance of Outer.
+ //
+ // WRT "same method invocation", see Iterator.duplicate for an example of this.
+}
+
+// Tests
+
+class Outer {
+ class Inner {
+ def passOuter(other: Outer) = () // pass any Outer
+ def passThisType(other: Outer.this.type) = () // pass only this Outer instance
+ def passInner(other: Inner) = () // pass only Inners from this Outer instance
+ def passInner2(other: Outer.this.Inner) = () // same as above
+ def passInnerSharp(other: Outer#Inner) = () // pass any Inner
+
+ def compareSimpleWithTypeMatch(other: Any) = other match {
+ case _: Inner => true
+ case _ => false
+ }
+ def compareSimpleWithInstanceOf(other: Any) = other.isInstanceOf[Inner]
+
+ def compareSharpWithTypeMatch(other: Any) = {
+ other match {
+ case _: Outer#Inner => true
+ case _ => false
+ }
+ }
+ def compareSharpWithInstanceOf(other: Any) = other.isInstanceOf[Outer#Inner]
+
+ def comparePathWithTypeMatch(other: Any) = other match {
+ case _: Outer.this.Inner => true
+ case _ => false
+ }
+ def comparePathWithInstanceOf(other: Any) = other.isInstanceOf[Outer.this.Inner]
+ }
+
+ def f() = {
+ class MethodInner {
+ def passOuter(other: Outer) = () // pass any Outer
+ def passThisType(other: Outer.this.type) = () // pass only this Outer instance
+ def passInner(other: Inner) = () // pass only Inners from this Outer instance
+ def passInner2(other: Outer.this.Inner) = () // same as above
+ def passInnerSharp(other: Outer#Inner) = () // pass any Inner
+ def passMethodInner(other: MethodInner) = () // pass only MethodInners from this Outer instance
+ // is there any way to refer to Outer#MethodInner? Not that there should be.
+
+ def compareWithInstanceOf(other: Any) = other.isInstanceOf[MethodInner]
+ def compareWithTypeMatch(other: Any) = other match {
+ case _: MethodInner => true
+ case _ => false
+ }
+ }
+
+ new MethodInner
+ }
+}
+
+object Test
+{
+ val outer1 = new Outer
+ val outer2 = new Outer
+ val inner1 = new outer1.Inner
+ val inner2 = new outer2.Inner
+ val method1 = outer1.f()
+ val method2 = outer2.f()
+
+ def testInnerStatic = {
+ // these should all work
+ inner1.passOuter(outer1)
+ inner1.passOuter(outer2)
+ inner1.passThisType(outer1)
+ inner1.passInner(inner1)
+ inner1.passInner2(inner1)
+ inner1.passInnerSharp(inner1)
+ inner1.passInnerSharp(inner2)
+
+ // these should all fail to compile, and do
+ //
+ // inner1.passThisType(outer2)
+ // inner1.passInner(inner2)
+ // inner1.passInner2(inner2)
+ }
+ def testInnerRuntime = {
+ println("testInnerRuntime\n")
+
+ List("These should be true under any scenario: ",
+ inner1.isInstanceOf[outer1.Inner] ,
+ inner1.isInstanceOf[Outer#Inner] ,
+ inner1 match { case _: Outer#Inner => true ; case _ => false } ,
+ inner1 match { case _: outer1.Inner => true ; case _ => false } ,
+ inner1.compareSharpWithTypeMatch(inner2) ,
+ inner1.compareSharpWithInstanceOf(inner2)
+ ) foreach println
+
+ List("These should be true under current proposal: ",
+ inner1.compareSimpleWithInstanceOf(inner2)
+ ) foreach println
+
+ List("These should be false under current proposal: ",
+ inner1.compareSimpleWithTypeMatch(inner2) ,
+ inner1.comparePathWithTypeMatch(inner2)
+ ) foreach println
+
+ List("These return true but I think should return false: ",
+ inner1.isInstanceOf[outer2.Inner] , // true
+ inner1.comparePathWithInstanceOf(inner2) // true
+ ) foreach println
+
+ List("These are doing the wrong thing under current proposal",
+ inner1 match { case _: outer2.Inner => true ; case _ => false } // should be false
+ ) foreach println
+ }
+
+ def testMethodInnerStatic = {
+ // these should all work
+ method1.passOuter(outer1)
+ method1.passOuter(outer2)
+ method1.passThisType(outer1)
+ method1.passInner(inner1)
+ method1.passInner2(inner1)
+ method1.passInnerSharp(inner1)
+ method1.passInnerSharp(inner2)
+ // This fails with:
+ //
+ // a.scala:114: error: type mismatch;
+ // found : Test.method1.type (with underlying type MethodInner forSome { type MethodInner <: java.lang.Object with ScalaObject{def passOuter(other: Outer): Unit; def passThisType(other: Test.outer1.type): Unit; def passInner(other: Test.outer1.Inner): Unit; def passInner2(other: Test.outer1.Inner): Unit; def passInnerSharp(other: Outer#Inner): Unit; def passMethodInner(other: MethodInner): Unit} })
+ // required: MethodInner where type MethodInner <: java.lang.Object with ScalaObject{def passOuter(other: Outer): Unit; def passThisType(other: Test.outer1.type): Unit; def passInner(other: Test.outer1.Inner): Unit; def passInner2(other: Test.outer1.Inner): Unit; def passInnerSharp(other: Outer#Inner): Unit; def passMethodInner(other: MethodInner): Unit}
+ // method1.passMethodInner(method1)
+ // ^
+ // method1.passMethodInner(method1)
+
+ // these should all fail to compile, and do
+ //
+ // method1.passThisType(outer2)
+ // method1.passInner(inner2)
+ // method1.passInner2(inner2)
+ // method1.passMethodInner(method2)
+ }
+
+ def testMethodInnerRuntime = {
+ println("\ntestMethodInnerRuntime\n")
+
+ List("These should be true under any scenario: ",
+ method1.compareWithInstanceOf(method1) ,
+ method1.compareWithTypeMatch(method1)
+ ) foreach println
+
+ List("These should be true under current proposal: ",
+ method1.compareWithInstanceOf(method2)
+ ) foreach println
+
+ List("These are doing the wrong thing under current proposal",
+ method1.compareWithTypeMatch(method2) // should be false
+ ) foreach println
+ }
+
+ def main(args: Array[String]): Unit = {
+ testInnerRuntime
+ testMethodInnerRuntime
+ }
+}
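Condensed to the two outside-view problems listed in Summary, and using the test's own values, the behaviour being exercised is the following (results as reported above, presumably because erasure discards the outer-instance prefix of an inner type):

  inner1.isInstanceOf[outer2.Inner]                               // true; should (maybe) be false
  inner1 match { case _: outer2.Inner => true; case _ => false }  // true; should definitely be false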
diff --git a/test/pending/run/subarray.check b/test/pending/run/subarray.check
deleted file mode 100644
index 814f4a4229..0000000000
--- a/test/pending/run/subarray.check
+++ /dev/null
@@ -1,2 +0,0 @@
-one
-two
diff --git a/test/pending/run/bug2365/bug2365.javaopts b/test/pending/script/bug2365.javaopts
index 357e033c1c..357e033c1c 100644
--- a/test/pending/run/bug2365/bug2365.javaopts
+++ b/test/pending/script/bug2365.javaopts
diff --git a/test/pending/run/bug2365/Test.scala b/test/pending/script/bug2365/Test.scala
index 92b58f4a25..53581d256b 100644
--- a/test/pending/run/bug2365/Test.scala
+++ b/test/pending/script/bug2365/Test.scala
@@ -20,11 +20,11 @@ object Test
def test(withF0: StructF0 => Int): Int = {
// Some large jar
- val ivyJar = File("/local/lib/java/ivy.jar").toURL
+ val jar = File("../../../../lib/scalacheck.jar").toURL
// load a class in a separate loader that will be passed to A
- val loader = new java.net.URLClassLoader(Array(File(".").toURL, ivyJar))
+ val loader = new java.net.URLClassLoader(Array(File(".").toURL, jar))
// load a real class to fill perm gen space
- Class.forName("org.apache.ivy.Ivy", true, loader).newInstance
+ Class.forName("org.scalacheck.Properties", true, loader).newInstance
// create a class from another class loader with an apply: Int method
val b = Class.forName("B", true, loader).newInstance
diff --git a/test/pending/script/bug2365/bug2365.scala b/test/pending/script/bug2365/bug2365.scala
new file mode 100755
index 0000000000..b5e05325cf
--- /dev/null
+++ b/test/pending/script/bug2365/bug2365.scala
@@ -0,0 +1,9 @@
+#!/bin/sh
+#
+# This script should fail with any build of scala where #2365
+# is not fixed, and otherwise succeed. Failure means running out
+# of PermGen space.
+#
+
+scalac -cp .:/local/lib/java/ivy.jar Test.scala
+JAVA_OPTS="-XX:MaxPermSize=25M -verbose:gc" scalac -cp $CP Test
diff --git a/test/files/jvm/NestedAnnotations.java b/test/support/annotations/NestedAnnotations.java
index 8f2327dcce..8f2327dcce 100644
--- a/test/files/jvm/NestedAnnotations.java
+++ b/test/support/annotations/NestedAnnotations.java
diff --git a/test/files/jvm/OuterEnum.java b/test/support/annotations/OuterEnum.java
index 75d3f34223..75d3f34223 100644
--- a/test/files/jvm/OuterEnum.java
+++ b/test/support/annotations/OuterEnum.java
diff --git a/test/files/jvm/OuterTParams.java b/test/support/annotations/OuterTParams.java
index 1d3db49fcf..1d3db49fcf 100644
--- a/test/files/jvm/OuterTParams.java
+++ b/test/support/annotations/OuterTParams.java
diff --git a/test/files/jvm/SourceAnnotation.java b/test/support/annotations/SourceAnnotation.java
index 047751ddfe..047751ddfe 100644
--- a/test/files/jvm/SourceAnnotation.java
+++ b/test/support/annotations/SourceAnnotation.java
diff --git a/test/files/jvm/mkAnnotationsJar.sh b/test/support/annotations/mkAnnotationsJar.sh
index 3d69351165..3d69351165 100755
--- a/test/files/jvm/mkAnnotationsJar.sh
+++ b/test/support/annotations/mkAnnotationsJar.sh
diff --git a/tools/abspath b/tools/abspath
new file mode 100755
index 0000000000..a2d1410b9b
--- /dev/null
+++ b/tools/abspath
@@ -0,0 +1,9 @@
+#!/bin/sh
+#
+# print the absolute path of each argument
+
+for relpath in $* ; do
+ D=`dirname "$relpath"`
+ B=`basename "$relpath"`
+ echo "`cd \"$D\" 2>/dev/null && pwd || echo \"$D\"`/$B"
+done
\ No newline at end of file
diff --git a/tools/cpof b/tools/cpof
new file mode 100755
index 0000000000..ab5a42b4fb
--- /dev/null
+++ b/tools/cpof
@@ -0,0 +1,30 @@
+#!/bin/sh
+#
+# Creates a classpath out of the contents of each directory
+# given as an argument.
+
+if [ $# == 0 ] ; then
+ echo "Usage: $0 [dir1 dir2 ...]"
+ exit 1
+fi
+
+THISDIR=`dirname $0`
+ABSCMD="${THISDIR}/abspath"
+CPRES=""
+
+for dir in $* ; do
+ absdir=`${ABSCMD} $dir`
+ LS=`ls -1 ${absdir}`
+
+ for x in $LS ; do
+ ABS=`${ABSCMD} "${absdir}/${x}"`
+ CPRES="${CPRES}:${ABS}"
+ done
+done
+
+# shaving the : off the beginning. Applause to /bin/sh for
+# keeping us humble about how far we've come.
+LEN=$(( ${#CPRES} - 1 ))
+result=${CPRES:1:${LEN}}
+
+echo $result
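For reference, the substring expansion at the end simply drops the leading colon (assuming a /bin/sh that supports bash-style ${var:offset:length}): with CPRES=":a.jar:b.jar" (12 characters), LEN is 11, so ${CPRES:1:${LEN}} yields "a.jar:b.jar".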
diff --git a/tools/diffPickled b/tools/diffPickled
new file mode 100755
index 0000000000..b4a345dc7d
--- /dev/null
+++ b/tools/diffPickled
@@ -0,0 +1,51 @@
+#!/bin/sh
+#
+# Shows the difference in pickler output between two variations on a class.
+#
+# If quick and strap are built normally you can run
+#
+# diffPickled foo.bar.Baz
+#
+# to see any differences between them in that class.
+
+USAGE="Usage: $0 classpath1 classpath2 class"
+TOOLSDIR=`dirname $0`
+BUILDDIR="${TOOLSDIR}/../build"
+QUICKDIR="${BUILDDIR}/quick"
+STRAPDIR="${BUILDDIR}/strap"
+
+CP1=""
+CP2=""
+CLASS=""
+
+if [ $# == 1 ] ; then
+ if [ -e ${QUICKDIR} ] && [ -e ${STRAPDIR} ] ; then
+ CP1=`${TOOLSDIR}/quickcp`
+ CP2=`${TOOLSDIR}/strapcp`
+ CLASS=$1
+ else
+ echo $USAGE
+ echo "(If only one argument is given, $QUICKDIR and $STRAPDIR must exist.)"
+ exit 1
+ fi
+elif [ $# == 3 ] ; then
+ CP1=$1
+ CP2=$2
+ CLASS=$3
+else
+ echo $USAGE
+ exit 1
+fi
+
+TMPDIR="/tmp/scala_pickle_diff${RANDOM}"
+
+if mkdir -m 0700 "$TMPDIR" 2>/dev/null ; then
+ ${TOOLSDIR}/showPickled -cp $CP1 $CLASS > "${TMPDIR}/out1.txt"
+ ${TOOLSDIR}/showPickled -cp $CP2 $CLASS > "${TMPDIR}/out2.txt"
+ diff "${TMPDIR}/out1.txt" "${TMPDIR}/out2.txt"
+ rm -rf ${TMPDIR}
+else
+ echo "Failed to create temporary directory ${TMPDIR}."
+ exit 1
+fi
+
diff --git a/tools/git-get-rev b/tools/git-get-rev
new file mode 100755
index 0000000000..9adda35ca7
--- /dev/null
+++ b/tools/git-get-rev
@@ -0,0 +1,5 @@
+#!/bin/sh
+#
+
+GIT_PAGER=cat
+git log -10 | grep git-svn-id | head -1
\ No newline at end of file
diff --git a/tools/packcp b/tools/packcp
new file mode 100755
index 0000000000..42bce9e266
--- /dev/null
+++ b/tools/packcp
@@ -0,0 +1,5 @@
+#!/bin/sh
+#
+
+THISDIR=`dirname $0`
+${THISDIR}/cpof ${THISDIR}/../build/pack/lib
diff --git a/tools/pathResolver b/tools/pathResolver
new file mode 100755
index 0000000000..efff45ea62
--- /dev/null
+++ b/tools/pathResolver
@@ -0,0 +1,11 @@
+#!/bin/sh
+#
+
+WHICH=`which scala`
+BASE=`dirname $WHICH`
+LIBDIR=$BASE/../lib
+
+echo Using ${WHICH}.
+echo
+
+java -cp "${LIBDIR}/*" scala.tools.util.PathResolver $*
diff --git a/tools/quickcp b/tools/quickcp
new file mode 100755
index 0000000000..0bfcad1941
--- /dev/null
+++ b/tools/quickcp
@@ -0,0 +1,8 @@
+#!/bin/sh
+#
+
+THISDIR=`dirname $0`
+cp=`${THISDIR}/cpof ${THISDIR}/../build/quick/classes`
+fjbg=`${THISDIR}/abspath ${THISDIR}/../lib/fjbg.jar`
+
+echo ${cp}:${fjbg}
diff --git a/tools/scalawhich b/tools/scalawhich
new file mode 100755
index 0000000000..6a4b1788a8
--- /dev/null
+++ b/tools/scalawhich
@@ -0,0 +1,4 @@
+#!/bin/sh
+#
+
+scala scala.tools.util.Which $*
diff --git a/tools/showPickled b/tools/showPickled
new file mode 100755
index 0000000000..27421c3ae5
--- /dev/null
+++ b/tools/showPickled
@@ -0,0 +1,32 @@
+#!/bin/sh
+#
+# Shows the pickled scala data in a classfile.
+
+if [ $# == 0 ] ; then
+ echo "Usage: $0 [--bare] [-cp classpath] <class*>"
+ exit 1
+fi
+
+TOOLSDIR=`dirname $0`
+CPOF="$TOOLSDIR/cpof"
+
+PACK="$TOOLSDIR/../build/pack/lib"
+QUICK="$TOOLSDIR/../build/quick/classes"
+STARR="$TOOLSDIR/../lib"
+CP=""
+
+if [ -f "${PACK}/scala-library.jar" ] ; then
+ CP=`${TOOLSDIR}/packcp`
+elif [ -d "${QUICK}/library" ] ; then
+ CP=`${TOOLSDIR}/quickcp`
+else
+ CP=`${TOOLSDIR}/starrcp`
+fi
+
+if [ "$1" == "-cp" ] ; then
+ shift
+ CP="${1}:${CP}"
+ shift
+fi
+
+java -cp "$CP" scala.tools.nsc.util.ShowPickled $*
diff --git a/tools/starrcp b/tools/starrcp
new file mode 100755
index 0000000000..6add5665b5
--- /dev/null
+++ b/tools/starrcp
@@ -0,0 +1,5 @@
+#!/bin/sh
+#
+
+THISDIR=`dirname $0`
+${THISDIR}/cpof ${THISDIR}/../lib
\ No newline at end of file
diff --git a/tools/strapcp b/tools/strapcp
new file mode 100755
index 0000000000..61e4a61b2c
--- /dev/null
+++ b/tools/strapcp
@@ -0,0 +1,8 @@
+#!/bin/sh
+#
+
+THISDIR=`dirname $0`
+cp=`${THISDIR}/cpof ${THISDIR}/../build/strap/classes`
+fjbg=`${THISDIR}/abspath ${THISDIR}/../lib/fjbg.jar`
+
+echo ${cp}:${fjbg}
diff --git a/truncate b/tools/truncate
index b7f410e25d..b7f410e25d 100755
--- a/truncate
+++ b/tools/truncate